code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def call_heat(tstat):
"""
Adjusts the temperature setpoints in order to call for heating. Returns
a handler to call when you want to reset the thermostat
"""
current_hsp, current_csp = tstat.heating_setpoint, tstat.cooling_setpoint
current_temp = tstat.temperature
tstat.write({
'heating_setpoint': current_temp+10,
'cooling_setpoint': current_temp+20,
'mode': HEAT,
})
def restore():
tstat.write({
'heating_setpoint': current_hsp,
'cooling_setpoint': current_csp,
'mode': AUTO,
})
return restore
|
def function[call_heat, parameter[tstat]]:
constant[
Adjusts the temperature setpoints in order to call for heating. Returns
a handler to call when you want to reset the thermostat
]
<ast.Tuple object at 0x7da18fe93df0> assign[=] tuple[[<ast.Attribute object at 0x7da20cabd6c0>, <ast.Attribute object at 0x7da20c7963e0>]]
variable[current_temp] assign[=] name[tstat].temperature
call[name[tstat].write, parameter[dictionary[[<ast.Constant object at 0x7da204347370>, <ast.Constant object at 0x7da204345690>, <ast.Constant object at 0x7da204344640>], [<ast.BinOp object at 0x7da204345990>, <ast.BinOp object at 0x7da204347910>, <ast.Name object at 0x7da204344310>]]]]
def function[restore, parameter[]]:
call[name[tstat].write, parameter[dictionary[[<ast.Constant object at 0x7da204346620>, <ast.Constant object at 0x7da204346860>, <ast.Constant object at 0x7da204345090>], [<ast.Name object at 0x7da2043468f0>, <ast.Name object at 0x7da204345810>, <ast.Name object at 0x7da204344c70>]]]]
return[name[restore]]
|
keyword[def] identifier[call_heat] ( identifier[tstat] ):
literal[string]
identifier[current_hsp] , identifier[current_csp] = identifier[tstat] . identifier[heating_setpoint] , identifier[tstat] . identifier[cooling_setpoint]
identifier[current_temp] = identifier[tstat] . identifier[temperature]
identifier[tstat] . identifier[write] ({
literal[string] : identifier[current_temp] + literal[int] ,
literal[string] : identifier[current_temp] + literal[int] ,
literal[string] : identifier[HEAT] ,
})
keyword[def] identifier[restore] ():
identifier[tstat] . identifier[write] ({
literal[string] : identifier[current_hsp] ,
literal[string] : identifier[current_csp] ,
literal[string] : identifier[AUTO] ,
})
keyword[return] identifier[restore]
|
def call_heat(tstat):
"""
Adjusts the temperature setpoints in order to call for heating. Returns
a handler to call when you want to reset the thermostat
"""
(current_hsp, current_csp) = (tstat.heating_setpoint, tstat.cooling_setpoint)
current_temp = tstat.temperature
tstat.write({'heating_setpoint': current_temp + 10, 'cooling_setpoint': current_temp + 20, 'mode': HEAT})
def restore():
tstat.write({'heating_setpoint': current_hsp, 'cooling_setpoint': current_csp, 'mode': AUTO})
return restore
|
def _report_exc_info(exc_info, request, extra_data, payload_data, level=None):
"""
Called by report_exc_info() wrapper
"""
if not _check_config():
return
filtered_level = _filtered_level(exc_info[1])
if level is None:
level = filtered_level
filtered_exc_info = events.on_exception_info(exc_info,
request=request,
extra_data=extra_data,
payload_data=payload_data,
level=level)
if filtered_exc_info is False:
return
cls, exc, trace = filtered_exc_info
data = _build_base_data(request)
if level is not None:
data['level'] = level
# walk the trace chain to collect cause and context exceptions
trace_chain = _walk_trace_chain(cls, exc, trace)
extra_trace_data = None
if len(trace_chain) > 1:
data['body'] = {
'trace_chain': trace_chain
}
if payload_data and ('body' in payload_data) and ('trace' in payload_data['body']):
extra_trace_data = payload_data['body']['trace']
del payload_data['body']['trace']
else:
data['body'] = {
'trace': trace_chain[0]
}
if extra_data:
extra_data = extra_data
if not isinstance(extra_data, dict):
extra_data = {'value': extra_data}
if extra_trace_data:
extra_data = dict_merge(extra_data, extra_trace_data)
data['custom'] = extra_data
if extra_trace_data and not extra_data:
data['custom'] = extra_trace_data
request = _get_actual_request(request)
_add_request_data(data, request)
_add_person_data(data, request)
_add_lambda_context_data(data)
data['server'] = _build_server_data()
if payload_data:
data = dict_merge(data, payload_data)
payload = _build_payload(data)
send_payload(payload, payload.get('access_token'))
return data['uuid']
|
def function[_report_exc_info, parameter[exc_info, request, extra_data, payload_data, level]]:
constant[
Called by report_exc_info() wrapper
]
if <ast.UnaryOp object at 0x7da1b0297220> begin[:]
return[None]
variable[filtered_level] assign[=] call[name[_filtered_level], parameter[call[name[exc_info]][constant[1]]]]
if compare[name[level] is constant[None]] begin[:]
variable[level] assign[=] name[filtered_level]
variable[filtered_exc_info] assign[=] call[name[events].on_exception_info, parameter[name[exc_info]]]
if compare[name[filtered_exc_info] is constant[False]] begin[:]
return[None]
<ast.Tuple object at 0x7da1b0294f10> assign[=] name[filtered_exc_info]
variable[data] assign[=] call[name[_build_base_data], parameter[name[request]]]
if compare[name[level] is_not constant[None]] begin[:]
call[name[data]][constant[level]] assign[=] name[level]
variable[trace_chain] assign[=] call[name[_walk_trace_chain], parameter[name[cls], name[exc], name[trace]]]
variable[extra_trace_data] assign[=] constant[None]
if compare[call[name[len], parameter[name[trace_chain]]] greater[>] constant[1]] begin[:]
call[name[data]][constant[body]] assign[=] dictionary[[<ast.Constant object at 0x7da1b02960b0>], [<ast.Name object at 0x7da1b0294400>]]
if <ast.BoolOp object at 0x7da1b0294430> begin[:]
variable[extra_trace_data] assign[=] call[call[name[payload_data]][constant[body]]][constant[trace]]
<ast.Delete object at 0x7da1b0297130>
if name[extra_data] begin[:]
variable[extra_data] assign[=] name[extra_data]
if <ast.UnaryOp object at 0x7da1b0314250> begin[:]
variable[extra_data] assign[=] dictionary[[<ast.Constant object at 0x7da1b0316470>], [<ast.Name object at 0x7da1b0315450>]]
if name[extra_trace_data] begin[:]
variable[extra_data] assign[=] call[name[dict_merge], parameter[name[extra_data], name[extra_trace_data]]]
call[name[data]][constant[custom]] assign[=] name[extra_data]
if <ast.BoolOp object at 0x7da1b0316770> begin[:]
call[name[data]][constant[custom]] assign[=] name[extra_trace_data]
variable[request] assign[=] call[name[_get_actual_request], parameter[name[request]]]
call[name[_add_request_data], parameter[name[data], name[request]]]
call[name[_add_person_data], parameter[name[data], name[request]]]
call[name[_add_lambda_context_data], parameter[name[data]]]
call[name[data]][constant[server]] assign[=] call[name[_build_server_data], parameter[]]
if name[payload_data] begin[:]
variable[data] assign[=] call[name[dict_merge], parameter[name[data], name[payload_data]]]
variable[payload] assign[=] call[name[_build_payload], parameter[name[data]]]
call[name[send_payload], parameter[name[payload], call[name[payload].get, parameter[constant[access_token]]]]]
return[call[name[data]][constant[uuid]]]
|
keyword[def] identifier[_report_exc_info] ( identifier[exc_info] , identifier[request] , identifier[extra_data] , identifier[payload_data] , identifier[level] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[_check_config] ():
keyword[return]
identifier[filtered_level] = identifier[_filtered_level] ( identifier[exc_info] [ literal[int] ])
keyword[if] identifier[level] keyword[is] keyword[None] :
identifier[level] = identifier[filtered_level]
identifier[filtered_exc_info] = identifier[events] . identifier[on_exception_info] ( identifier[exc_info] ,
identifier[request] = identifier[request] ,
identifier[extra_data] = identifier[extra_data] ,
identifier[payload_data] = identifier[payload_data] ,
identifier[level] = identifier[level] )
keyword[if] identifier[filtered_exc_info] keyword[is] keyword[False] :
keyword[return]
identifier[cls] , identifier[exc] , identifier[trace] = identifier[filtered_exc_info]
identifier[data] = identifier[_build_base_data] ( identifier[request] )
keyword[if] identifier[level] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[level]
identifier[trace_chain] = identifier[_walk_trace_chain] ( identifier[cls] , identifier[exc] , identifier[trace] )
identifier[extra_trace_data] = keyword[None]
keyword[if] identifier[len] ( identifier[trace_chain] )> literal[int] :
identifier[data] [ literal[string] ]={
literal[string] : identifier[trace_chain]
}
keyword[if] identifier[payload_data] keyword[and] ( literal[string] keyword[in] identifier[payload_data] ) keyword[and] ( literal[string] keyword[in] identifier[payload_data] [ literal[string] ]):
identifier[extra_trace_data] = identifier[payload_data] [ literal[string] ][ literal[string] ]
keyword[del] identifier[payload_data] [ literal[string] ][ literal[string] ]
keyword[else] :
identifier[data] [ literal[string] ]={
literal[string] : identifier[trace_chain] [ literal[int] ]
}
keyword[if] identifier[extra_data] :
identifier[extra_data] = identifier[extra_data]
keyword[if] keyword[not] identifier[isinstance] ( identifier[extra_data] , identifier[dict] ):
identifier[extra_data] ={ literal[string] : identifier[extra_data] }
keyword[if] identifier[extra_trace_data] :
identifier[extra_data] = identifier[dict_merge] ( identifier[extra_data] , identifier[extra_trace_data] )
identifier[data] [ literal[string] ]= identifier[extra_data]
keyword[if] identifier[extra_trace_data] keyword[and] keyword[not] identifier[extra_data] :
identifier[data] [ literal[string] ]= identifier[extra_trace_data]
identifier[request] = identifier[_get_actual_request] ( identifier[request] )
identifier[_add_request_data] ( identifier[data] , identifier[request] )
identifier[_add_person_data] ( identifier[data] , identifier[request] )
identifier[_add_lambda_context_data] ( identifier[data] )
identifier[data] [ literal[string] ]= identifier[_build_server_data] ()
keyword[if] identifier[payload_data] :
identifier[data] = identifier[dict_merge] ( identifier[data] , identifier[payload_data] )
identifier[payload] = identifier[_build_payload] ( identifier[data] )
identifier[send_payload] ( identifier[payload] , identifier[payload] . identifier[get] ( literal[string] ))
keyword[return] identifier[data] [ literal[string] ]
|
def _report_exc_info(exc_info, request, extra_data, payload_data, level=None):
"""
Called by report_exc_info() wrapper
"""
if not _check_config():
return # depends on [control=['if'], data=[]]
filtered_level = _filtered_level(exc_info[1])
if level is None:
level = filtered_level # depends on [control=['if'], data=['level']]
filtered_exc_info = events.on_exception_info(exc_info, request=request, extra_data=extra_data, payload_data=payload_data, level=level)
if filtered_exc_info is False:
return # depends on [control=['if'], data=[]]
(cls, exc, trace) = filtered_exc_info
data = _build_base_data(request)
if level is not None:
data['level'] = level # depends on [control=['if'], data=['level']]
# walk the trace chain to collect cause and context exceptions
trace_chain = _walk_trace_chain(cls, exc, trace)
extra_trace_data = None
if len(trace_chain) > 1:
data['body'] = {'trace_chain': trace_chain}
if payload_data and 'body' in payload_data and ('trace' in payload_data['body']):
extra_trace_data = payload_data['body']['trace']
del payload_data['body']['trace'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
data['body'] = {'trace': trace_chain[0]}
if extra_data:
extra_data = extra_data
if not isinstance(extra_data, dict):
extra_data = {'value': extra_data} # depends on [control=['if'], data=[]]
if extra_trace_data:
extra_data = dict_merge(extra_data, extra_trace_data) # depends on [control=['if'], data=[]]
data['custom'] = extra_data # depends on [control=['if'], data=[]]
if extra_trace_data and (not extra_data):
data['custom'] = extra_trace_data # depends on [control=['if'], data=[]]
request = _get_actual_request(request)
_add_request_data(data, request)
_add_person_data(data, request)
_add_lambda_context_data(data)
data['server'] = _build_server_data()
if payload_data:
data = dict_merge(data, payload_data) # depends on [control=['if'], data=[]]
payload = _build_payload(data)
send_payload(payload, payload.get('access_token'))
return data['uuid']
|
def isempty(self, tables=None):
"""
Return whether a table or the entire database is empty.
A database is empty is if it has no tables. A table is empty
if it has no rows.
Arguments:
tables (sequence of str, optional): If provided, check
that the named tables are empty. If not provided, check
that all tables are empty.
Returns:
bool: True if tables are empty, else false.
Raises:
sql.OperationalError: If one or more of the tables do not
exist.
"""
tables = tables or self.tables
for table in tables:
if self.num_rows(table) > 0:
return False
return True
|
def function[isempty, parameter[self, tables]]:
constant[
Return whether a table or the entire database is empty.
A database is empty is if it has no tables. A table is empty
if it has no rows.
Arguments:
tables (sequence of str, optional): If provided, check
that the named tables are empty. If not provided, check
that all tables are empty.
Returns:
bool: True if tables are empty, else false.
Raises:
sql.OperationalError: If one or more of the tables do not
exist.
]
variable[tables] assign[=] <ast.BoolOp object at 0x7da1b083a3e0>
for taget[name[table]] in starred[name[tables]] begin[:]
if compare[call[name[self].num_rows, parameter[name[table]]] greater[>] constant[0]] begin[:]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[isempty] ( identifier[self] , identifier[tables] = keyword[None] ):
literal[string]
identifier[tables] = identifier[tables] keyword[or] identifier[self] . identifier[tables]
keyword[for] identifier[table] keyword[in] identifier[tables] :
keyword[if] identifier[self] . identifier[num_rows] ( identifier[table] )> literal[int] :
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def isempty(self, tables=None):
"""
Return whether a table or the entire database is empty.
A database is empty is if it has no tables. A table is empty
if it has no rows.
Arguments:
tables (sequence of str, optional): If provided, check
that the named tables are empty. If not provided, check
that all tables are empty.
Returns:
bool: True if tables are empty, else false.
Raises:
sql.OperationalError: If one or more of the tables do not
exist.
"""
tables = tables or self.tables
for table in tables:
if self.num_rows(table) > 0:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['table']]
return True
|
def create(context, name, team_id, data, active):
"""create(context, name, team_id, data, active)
Create a Remote CI
>>> dcictl remoteci-create [OPTIONS]
:param string name: Name of the Remote CI [required]
:param string team_id: ID of the team to associate this remote CI with
[required]
:param string data: JSON data to pass during remote CI creation
:param boolean active: Mark remote CI active
:param boolean no-active: Mark remote CI inactive
"""
state = utils.active_string(active)
team_id = team_id or identity.my_team_id(context)
result = remoteci.create(context, name=name, team_id=team_id,
data=data, state=state)
utils.format_output(result, context.format)
|
def function[create, parameter[context, name, team_id, data, active]]:
constant[create(context, name, team_id, data, active)
Create a Remote CI
>>> dcictl remoteci-create [OPTIONS]
:param string name: Name of the Remote CI [required]
:param string team_id: ID of the team to associate this remote CI with
[required]
:param string data: JSON data to pass during remote CI creation
:param boolean active: Mark remote CI active
:param boolean no-active: Mark remote CI inactive
]
variable[state] assign[=] call[name[utils].active_string, parameter[name[active]]]
variable[team_id] assign[=] <ast.BoolOp object at 0x7da1b237f790>
variable[result] assign[=] call[name[remoteci].create, parameter[name[context]]]
call[name[utils].format_output, parameter[name[result], name[context].format]]
|
keyword[def] identifier[create] ( identifier[context] , identifier[name] , identifier[team_id] , identifier[data] , identifier[active] ):
literal[string]
identifier[state] = identifier[utils] . identifier[active_string] ( identifier[active] )
identifier[team_id] = identifier[team_id] keyword[or] identifier[identity] . identifier[my_team_id] ( identifier[context] )
identifier[result] = identifier[remoteci] . identifier[create] ( identifier[context] , identifier[name] = identifier[name] , identifier[team_id] = identifier[team_id] ,
identifier[data] = identifier[data] , identifier[state] = identifier[state] )
identifier[utils] . identifier[format_output] ( identifier[result] , identifier[context] . identifier[format] )
|
def create(context, name, team_id, data, active):
"""create(context, name, team_id, data, active)
Create a Remote CI
>>> dcictl remoteci-create [OPTIONS]
:param string name: Name of the Remote CI [required]
:param string team_id: ID of the team to associate this remote CI with
[required]
:param string data: JSON data to pass during remote CI creation
:param boolean active: Mark remote CI active
:param boolean no-active: Mark remote CI inactive
"""
state = utils.active_string(active)
team_id = team_id or identity.my_team_id(context)
result = remoteci.create(context, name=name, team_id=team_id, data=data, state=state)
utils.format_output(result, context.format)
|
def get(self, start, end):
"""
used in unittests
"""
return [self.read_byte(addr) for addr in range(start, end)]
|
def function[get, parameter[self, start, end]]:
constant[
used in unittests
]
return[<ast.ListComp object at 0x7da1b255d600>]
|
keyword[def] identifier[get] ( identifier[self] , identifier[start] , identifier[end] ):
literal[string]
keyword[return] [ identifier[self] . identifier[read_byte] ( identifier[addr] ) keyword[for] identifier[addr] keyword[in] identifier[range] ( identifier[start] , identifier[end] )]
|
def get(self, start, end):
"""
used in unittests
"""
return [self.read_byte(addr) for addr in range(start, end)]
|
def mexican(lb, ub, n):
r"""Generate the mexican hat wavelet
The Mexican wavelet is:
.. math:: w[x] = \cos{5x} \exp^{-x^2/2}
:param lb: lower bound
:param ub: upper bound
:param int n: waveform data samples
:return: the waveform
.. plot::
:include-source:
:width: 80%
from spectrum import mexican
from pylab import plot
plot(mexican(0, 10, 100))
"""
if n <= 0:
raise ValueError("n must be strictly positive")
x = numpy.linspace(lb, ub, n)
psi = (1.-x**2.) * (2./(numpy.sqrt(3.)*pi**0.25)) * numpy.exp(-x**2/2.)
return psi
|
def function[mexican, parameter[lb, ub, n]]:
constant[Generate the mexican hat wavelet
The Mexican wavelet is:
.. math:: w[x] = \cos{5x} \exp^{-x^2/2}
:param lb: lower bound
:param ub: upper bound
:param int n: waveform data samples
:return: the waveform
.. plot::
:include-source:
:width: 80%
from spectrum import mexican
from pylab import plot
plot(mexican(0, 10, 100))
]
if compare[name[n] less_or_equal[<=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b01c2a40>
variable[x] assign[=] call[name[numpy].linspace, parameter[name[lb], name[ub], name[n]]]
variable[psi] assign[=] binary_operation[binary_operation[binary_operation[constant[1.0] - binary_operation[name[x] ** constant[2.0]]] * binary_operation[constant[2.0] / binary_operation[call[name[numpy].sqrt, parameter[constant[3.0]]] * binary_operation[name[pi] ** constant[0.25]]]]] * call[name[numpy].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b016d030> / constant[2.0]]]]]
return[name[psi]]
|
keyword[def] identifier[mexican] ( identifier[lb] , identifier[ub] , identifier[n] ):
literal[string]
keyword[if] identifier[n] <= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[x] = identifier[numpy] . identifier[linspace] ( identifier[lb] , identifier[ub] , identifier[n] )
identifier[psi] =( literal[int] - identifier[x] ** literal[int] )*( literal[int] /( identifier[numpy] . identifier[sqrt] ( literal[int] )* identifier[pi] ** literal[int] ))* identifier[numpy] . identifier[exp] (- identifier[x] ** literal[int] / literal[int] )
keyword[return] identifier[psi]
|
def mexican(lb, ub, n):
"""Generate the mexican hat wavelet
The Mexican wavelet is:
.. math:: w[x] = \\cos{5x} \\exp^{-x^2/2}
:param lb: lower bound
:param ub: upper bound
:param int n: waveform data samples
:return: the waveform
.. plot::
:include-source:
:width: 80%
from spectrum import mexican
from pylab import plot
plot(mexican(0, 10, 100))
"""
if n <= 0:
raise ValueError('n must be strictly positive') # depends on [control=['if'], data=[]]
x = numpy.linspace(lb, ub, n)
psi = (1.0 - x ** 2.0) * (2.0 / (numpy.sqrt(3.0) * pi ** 0.25)) * numpy.exp(-x ** 2 / 2.0)
return psi
|
def filter_batched_data(data, mapping):
"""
Iterates over the data and mapping for a ColumnDataSource and
replaces columns with repeating values with a scalar. This is
purely and optimization for scalar types.
"""
for k, v in list(mapping.items()):
if isinstance(v, dict) and 'field' in v:
if 'transform' in v:
continue
v = v['field']
elif not isinstance(v, basestring):
continue
values = data[v]
try:
if len(unique_array(values)) == 1:
mapping[k] = values[0]
del data[v]
except:
pass
|
def function[filter_batched_data, parameter[data, mapping]]:
constant[
Iterates over the data and mapping for a ColumnDataSource and
replaces columns with repeating values with a scalar. This is
purely and optimization for scalar types.
]
for taget[tuple[[<ast.Name object at 0x7da1b1c8b0a0>, <ast.Name object at 0x7da1b1c8b820>]]] in starred[call[name[list], parameter[call[name[mapping].items, parameter[]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b1c8b340> begin[:]
if compare[constant[transform] in name[v]] begin[:]
continue
variable[v] assign[=] call[name[v]][constant[field]]
variable[values] assign[=] call[name[data]][name[v]]
<ast.Try object at 0x7da20c993a30>
|
keyword[def] identifier[filter_batched_data] ( identifier[data] , identifier[mapping] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[list] ( identifier[mapping] . identifier[items] ()):
keyword[if] identifier[isinstance] ( identifier[v] , identifier[dict] ) keyword[and] literal[string] keyword[in] identifier[v] :
keyword[if] literal[string] keyword[in] identifier[v] :
keyword[continue]
identifier[v] = identifier[v] [ literal[string] ]
keyword[elif] keyword[not] identifier[isinstance] ( identifier[v] , identifier[basestring] ):
keyword[continue]
identifier[values] = identifier[data] [ identifier[v] ]
keyword[try] :
keyword[if] identifier[len] ( identifier[unique_array] ( identifier[values] ))== literal[int] :
identifier[mapping] [ identifier[k] ]= identifier[values] [ literal[int] ]
keyword[del] identifier[data] [ identifier[v] ]
keyword[except] :
keyword[pass]
|
def filter_batched_data(data, mapping):
"""
Iterates over the data and mapping for a ColumnDataSource and
replaces columns with repeating values with a scalar. This is
purely and optimization for scalar types.
"""
for (k, v) in list(mapping.items()):
if isinstance(v, dict) and 'field' in v:
if 'transform' in v:
continue # depends on [control=['if'], data=[]]
v = v['field'] # depends on [control=['if'], data=[]]
elif not isinstance(v, basestring):
continue # depends on [control=['if'], data=[]]
values = data[v]
try:
if len(unique_array(values)) == 1:
mapping[k] = values[0]
del data[v] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
|
async def create_room(self, alias: Optional[str] = None, is_public: bool = False,
name: Optional[str] = None, topic: Optional[str] = None,
is_direct: bool = False, invitees: Optional[List[str]] = None,
initial_state: Optional[List[dict]] = None) -> str:
"""
Create a new room. See also: `API reference`_
Args:
alias: The desired room alias **local part**. If this is included, a room alias will be
created and mapped to the newly created room. The alias will belong on the same
homeserver which created the room. For example, if this was set to "foo" and sent to
the homeserver "example.com" the complete room alias would be ``#foo:example.com``.
is_public: This flag sets the state event preset to ``public_chat``, which sets
``join_rules`` to ``public``. Defaults to false, which sets ``join_rules`` to
``invite``.
name: If this is included, an ``m.room.name`` event will be sent into the room to
indicate the name of the room. See `Room Events`_ for more information on
``m.room.name``.
topic: If this is included, an ``m.room.topic`` event will be sent into the room to
indicate the topic for the room. See `Room Events`_ for more information on
``m.room.topic``.
is_direct: This flag makes the server set the ``is_direct`` flag on the
``m.room.member`` events sent to the users in ``invite`` and ``invite_3pid``. See
`Direct Messaging`_ for more information.
invitees: A list of user IDs to invite to the room. This will tell the server to invite
everyone in the list to the newly created room.
initial_state: A list of state events to set in the new room. This allows the user to
override the default state events set in the new room. The expected format of the
state events are an object with type, state_key and content keys set.
Takes precedence over events set by `is_public`, but gets overriden by ``name`` and
``topic keys``.
Returns:
The ID of the newly created room.
Raises:
MatrixResponseError: If the response does not contain a ``room_id`` field.
.. _API reference:
https://matrix.org/docs/spec/client_server/r0.3.0.html#post-matrix-client-r0-createroom
.. _Room Events:
https://matrix.org/docs/spec/client_server/r0.3.0.html#room-events
.. _Direct Messaging:
https://matrix.org/docs/spec/client_server/r0.3.0.html#direct-messaging
"""
await self.ensure_registered()
content = {
"visibility": "private",
"is_direct": is_direct,
"preset": "public_chat" if is_public else "private_chat",
}
if alias:
content["room_alias_name"] = alias
if invitees:
content["invite"] = invitees
if name:
content["name"] = name
if topic:
content["topic"] = topic
if initial_state:
content["initial_state"] = initial_state
resp = await self.client.request("POST", "/createRoom", content)
try:
return resp["room_id"]
except KeyError:
raise MatrixResponseError("Room create response did not contain room_id.")
|
<ast.AsyncFunctionDef object at 0x7da20e9b19c0>
|
keyword[async] keyword[def] identifier[create_room] ( identifier[self] , identifier[alias] : identifier[Optional] [ identifier[str] ]= keyword[None] , identifier[is_public] : identifier[bool] = keyword[False] ,
identifier[name] : identifier[Optional] [ identifier[str] ]= keyword[None] , identifier[topic] : identifier[Optional] [ identifier[str] ]= keyword[None] ,
identifier[is_direct] : identifier[bool] = keyword[False] , identifier[invitees] : identifier[Optional] [ identifier[List] [ identifier[str] ]]= keyword[None] ,
identifier[initial_state] : identifier[Optional] [ identifier[List] [ identifier[dict] ]]= keyword[None] )-> identifier[str] :
literal[string]
keyword[await] identifier[self] . identifier[ensure_registered] ()
identifier[content] ={
literal[string] : literal[string] ,
literal[string] : identifier[is_direct] ,
literal[string] : literal[string] keyword[if] identifier[is_public] keyword[else] literal[string] ,
}
keyword[if] identifier[alias] :
identifier[content] [ literal[string] ]= identifier[alias]
keyword[if] identifier[invitees] :
identifier[content] [ literal[string] ]= identifier[invitees]
keyword[if] identifier[name] :
identifier[content] [ literal[string] ]= identifier[name]
keyword[if] identifier[topic] :
identifier[content] [ literal[string] ]= identifier[topic]
keyword[if] identifier[initial_state] :
identifier[content] [ literal[string] ]= identifier[initial_state]
identifier[resp] = keyword[await] identifier[self] . identifier[client] . identifier[request] ( literal[string] , literal[string] , identifier[content] )
keyword[try] :
keyword[return] identifier[resp] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[MatrixResponseError] ( literal[string] )
|
async def create_room(self, alias: Optional[str]=None, is_public: bool=False, name: Optional[str]=None, topic: Optional[str]=None, is_direct: bool=False, invitees: Optional[List[str]]=None, initial_state: Optional[List[dict]]=None) -> str:
"""
Create a new room. See also: `API reference`_
Args:
alias: The desired room alias **local part**. If this is included, a room alias will be
created and mapped to the newly created room. The alias will belong on the same
homeserver which created the room. For example, if this was set to "foo" and sent to
the homeserver "example.com" the complete room alias would be ``#foo:example.com``.
is_public: This flag sets the state event preset to ``public_chat``, which sets
``join_rules`` to ``public``. Defaults to false, which sets ``join_rules`` to
``invite``.
name: If this is included, an ``m.room.name`` event will be sent into the room to
indicate the name of the room. See `Room Events`_ for more information on
``m.room.name``.
topic: If this is included, an ``m.room.topic`` event will be sent into the room to
indicate the topic for the room. See `Room Events`_ for more information on
``m.room.topic``.
is_direct: This flag makes the server set the ``is_direct`` flag on the
``m.room.member`` events sent to the users in ``invite`` and ``invite_3pid``. See
`Direct Messaging`_ for more information.
invitees: A list of user IDs to invite to the room. This will tell the server to invite
everyone in the list to the newly created room.
initial_state: A list of state events to set in the new room. This allows the user to
override the default state events set in the new room. The expected format of the
state events are an object with type, state_key and content keys set.
Takes precedence over events set by `is_public`, but gets overriden by ``name`` and
``topic keys``.
Returns:
The ID of the newly created room.
Raises:
MatrixResponseError: If the response does not contain a ``room_id`` field.
.. _API reference:
https://matrix.org/docs/spec/client_server/r0.3.0.html#post-matrix-client-r0-createroom
.. _Room Events:
https://matrix.org/docs/spec/client_server/r0.3.0.html#room-events
.. _Direct Messaging:
https://matrix.org/docs/spec/client_server/r0.3.0.html#direct-messaging
"""
await self.ensure_registered()
content = {'visibility': 'private', 'is_direct': is_direct, 'preset': 'public_chat' if is_public else 'private_chat'}
if alias:
content['room_alias_name'] = alias # depends on [control=['if'], data=[]]
if invitees:
content['invite'] = invitees # depends on [control=['if'], data=[]]
if name:
content['name'] = name # depends on [control=['if'], data=[]]
if topic:
content['topic'] = topic # depends on [control=['if'], data=[]]
if initial_state:
content['initial_state'] = initial_state # depends on [control=['if'], data=[]]
resp = await self.client.request('POST', '/createRoom', content)
try:
return resp['room_id'] # depends on [control=['try'], data=[]]
except KeyError:
raise MatrixResponseError('Room create response did not contain room_id.') # depends on [control=['except'], data=[]]
|
def loadable(self, src, dst):
"""
Determines if there's enough space to load the target database.
"""
from fabric import state
from fabric.task_utils import crawl
src_task = crawl(src, state.commands)
assert src_task, 'Unknown source role: %s' % src
dst_task = crawl(dst, state.commands)
assert dst_task, 'Unknown destination role: %s' % src
# Get source database size.
src_task()
env.host_string = env.hosts[0]
src_size_bytes = self.get_size()
# Get target database size, if any.
dst_task()
env.host_string = env.hosts[0]
try:
dst_size_bytes = self.get_size()
except (ValueError, TypeError):
dst_size_bytes = 0
# Get target host disk size.
free_space_bytes = self.get_free_space()
# Deduct existing database size, because we'll be deleting it.
balance_bytes = free_space_bytes + dst_size_bytes - src_size_bytes
balance_bytes_scaled, units = pretty_bytes(balance_bytes)
viable = balance_bytes >= 0
if self.verbose:
print('src_db_size:', pretty_bytes(src_size_bytes))
print('dst_db_size:', pretty_bytes(dst_size_bytes))
print('dst_free_space:', pretty_bytes(free_space_bytes))
print
if viable:
print('Viable! There will be %.02f %s of disk space left.' % (balance_bytes_scaled, units))
else:
print('Not viable! We would be %.02f %s short.' % (balance_bytes_scaled, units))
return viable
|
def function[loadable, parameter[self, src, dst]]:
constant[
Determines if there's enough space to load the target database.
]
from relative_module[fabric] import module[state]
from relative_module[fabric.task_utils] import module[crawl]
variable[src_task] assign[=] call[name[crawl], parameter[name[src], name[state].commands]]
assert[name[src_task]]
variable[dst_task] assign[=] call[name[crawl], parameter[name[dst], name[state].commands]]
assert[name[dst_task]]
call[name[src_task], parameter[]]
name[env].host_string assign[=] call[name[env].hosts][constant[0]]
variable[src_size_bytes] assign[=] call[name[self].get_size, parameter[]]
call[name[dst_task], parameter[]]
name[env].host_string assign[=] call[name[env].hosts][constant[0]]
<ast.Try object at 0x7da1b003dd50>
variable[free_space_bytes] assign[=] call[name[self].get_free_space, parameter[]]
variable[balance_bytes] assign[=] binary_operation[binary_operation[name[free_space_bytes] + name[dst_size_bytes]] - name[src_size_bytes]]
<ast.Tuple object at 0x7da1b00b98d0> assign[=] call[name[pretty_bytes], parameter[name[balance_bytes]]]
variable[viable] assign[=] compare[name[balance_bytes] greater_or_equal[>=] constant[0]]
if name[self].verbose begin[:]
call[name[print], parameter[constant[src_db_size:], call[name[pretty_bytes], parameter[name[src_size_bytes]]]]]
call[name[print], parameter[constant[dst_db_size:], call[name[pretty_bytes], parameter[name[dst_size_bytes]]]]]
call[name[print], parameter[constant[dst_free_space:], call[name[pretty_bytes], parameter[name[free_space_bytes]]]]]
name[print]
if name[viable] begin[:]
call[name[print], parameter[binary_operation[constant[Viable! There will be %.02f %s of disk space left.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0049780>, <ast.Name object at 0x7da1b0049750>]]]]]
return[name[viable]]
|
keyword[def] identifier[loadable] ( identifier[self] , identifier[src] , identifier[dst] ):
literal[string]
keyword[from] identifier[fabric] keyword[import] identifier[state]
keyword[from] identifier[fabric] . identifier[task_utils] keyword[import] identifier[crawl]
identifier[src_task] = identifier[crawl] ( identifier[src] , identifier[state] . identifier[commands] )
keyword[assert] identifier[src_task] , literal[string] % identifier[src]
identifier[dst_task] = identifier[crawl] ( identifier[dst] , identifier[state] . identifier[commands] )
keyword[assert] identifier[dst_task] , literal[string] % identifier[src]
identifier[src_task] ()
identifier[env] . identifier[host_string] = identifier[env] . identifier[hosts] [ literal[int] ]
identifier[src_size_bytes] = identifier[self] . identifier[get_size] ()
identifier[dst_task] ()
identifier[env] . identifier[host_string] = identifier[env] . identifier[hosts] [ literal[int] ]
keyword[try] :
identifier[dst_size_bytes] = identifier[self] . identifier[get_size] ()
keyword[except] ( identifier[ValueError] , identifier[TypeError] ):
identifier[dst_size_bytes] = literal[int]
identifier[free_space_bytes] = identifier[self] . identifier[get_free_space] ()
identifier[balance_bytes] = identifier[free_space_bytes] + identifier[dst_size_bytes] - identifier[src_size_bytes]
identifier[balance_bytes_scaled] , identifier[units] = identifier[pretty_bytes] ( identifier[balance_bytes] )
identifier[viable] = identifier[balance_bytes] >= literal[int]
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] , identifier[pretty_bytes] ( identifier[src_size_bytes] ))
identifier[print] ( literal[string] , identifier[pretty_bytes] ( identifier[dst_size_bytes] ))
identifier[print] ( literal[string] , identifier[pretty_bytes] ( identifier[free_space_bytes] ))
identifier[print]
keyword[if] identifier[viable] :
identifier[print] ( literal[string] %( identifier[balance_bytes_scaled] , identifier[units] ))
keyword[else] :
identifier[print] ( literal[string] %( identifier[balance_bytes_scaled] , identifier[units] ))
keyword[return] identifier[viable]
|
def loadable(self, src, dst):
"""
Determines if there's enough space to load the target database.
"""
from fabric import state
from fabric.task_utils import crawl
src_task = crawl(src, state.commands)
assert src_task, 'Unknown source role: %s' % src
dst_task = crawl(dst, state.commands)
assert dst_task, 'Unknown destination role: %s' % src
# Get source database size.
src_task()
env.host_string = env.hosts[0]
src_size_bytes = self.get_size()
# Get target database size, if any.
dst_task()
env.host_string = env.hosts[0]
try:
dst_size_bytes = self.get_size() # depends on [control=['try'], data=[]]
except (ValueError, TypeError):
dst_size_bytes = 0 # depends on [control=['except'], data=[]]
# Get target host disk size.
free_space_bytes = self.get_free_space()
# Deduct existing database size, because we'll be deleting it.
balance_bytes = free_space_bytes + dst_size_bytes - src_size_bytes
(balance_bytes_scaled, units) = pretty_bytes(balance_bytes)
viable = balance_bytes >= 0
if self.verbose:
print('src_db_size:', pretty_bytes(src_size_bytes))
print('dst_db_size:', pretty_bytes(dst_size_bytes))
print('dst_free_space:', pretty_bytes(free_space_bytes))
print
if viable:
print('Viable! There will be %.02f %s of disk space left.' % (balance_bytes_scaled, units)) # depends on [control=['if'], data=[]]
else:
print('Not viable! We would be %.02f %s short.' % (balance_bytes_scaled, units)) # depends on [control=['if'], data=[]]
return viable
|
def _get_ignore_from_manifest(filename):
"""Gather the various ignore patterns from a MANIFEST.in.
Returns a list of standard ignore patterns and a list of regular
expressions to ignore.
"""
class MyTextFile(TextFile):
def error(self, msg, line=None): # pragma: nocover
# (this is never called by TextFile in current versions of CPython)
raise Failure(self.gen_error(msg, line))
def warn(self, msg, line=None):
warning(self.gen_error(msg, line))
template = MyTextFile(filename,
strip_comments=True,
skip_blanks=True,
join_lines=True,
lstrip_ws=True,
rstrip_ws=True,
collapse_join=True)
try:
lines = template.readlines()
finally:
template.close()
return _get_ignore_from_manifest_lines(lines)
|
def function[_get_ignore_from_manifest, parameter[filename]]:
constant[Gather the various ignore patterns from a MANIFEST.in.
Returns a list of standard ignore patterns and a list of regular
expressions to ignore.
]
class class[MyTextFile, parameter[]] begin[:]
def function[error, parameter[self, msg, line]]:
<ast.Raise object at 0x7da1b23462f0>
def function[warn, parameter[self, msg, line]]:
call[name[warning], parameter[call[name[self].gen_error, parameter[name[msg], name[line]]]]]
variable[template] assign[=] call[name[MyTextFile], parameter[name[filename]]]
<ast.Try object at 0x7da1b15d2530>
return[call[name[_get_ignore_from_manifest_lines], parameter[name[lines]]]]
|
keyword[def] identifier[_get_ignore_from_manifest] ( identifier[filename] ):
literal[string]
keyword[class] identifier[MyTextFile] ( identifier[TextFile] ):
keyword[def] identifier[error] ( identifier[self] , identifier[msg] , identifier[line] = keyword[None] ):
keyword[raise] identifier[Failure] ( identifier[self] . identifier[gen_error] ( identifier[msg] , identifier[line] ))
keyword[def] identifier[warn] ( identifier[self] , identifier[msg] , identifier[line] = keyword[None] ):
identifier[warning] ( identifier[self] . identifier[gen_error] ( identifier[msg] , identifier[line] ))
identifier[template] = identifier[MyTextFile] ( identifier[filename] ,
identifier[strip_comments] = keyword[True] ,
identifier[skip_blanks] = keyword[True] ,
identifier[join_lines] = keyword[True] ,
identifier[lstrip_ws] = keyword[True] ,
identifier[rstrip_ws] = keyword[True] ,
identifier[collapse_join] = keyword[True] )
keyword[try] :
identifier[lines] = identifier[template] . identifier[readlines] ()
keyword[finally] :
identifier[template] . identifier[close] ()
keyword[return] identifier[_get_ignore_from_manifest_lines] ( identifier[lines] )
|
def _get_ignore_from_manifest(filename):
"""Gather the various ignore patterns from a MANIFEST.in.
Returns a list of standard ignore patterns and a list of regular
expressions to ignore.
"""
class MyTextFile(TextFile):
def error(self, msg, line=None): # pragma: nocover
# (this is never called by TextFile in current versions of CPython)
raise Failure(self.gen_error(msg, line))
def warn(self, msg, line=None):
warning(self.gen_error(msg, line))
template = MyTextFile(filename, strip_comments=True, skip_blanks=True, join_lines=True, lstrip_ws=True, rstrip_ws=True, collapse_join=True)
try:
lines = template.readlines() # depends on [control=['try'], data=[]]
finally:
template.close()
return _get_ignore_from_manifest_lines(lines)
|
def create_alias(self, userid, data):
"""Create alias address"""
return self.api_call(
ENDPOINTS['aliases']['new'],
dict(userid=userid),
body=data)
|
def function[create_alias, parameter[self, userid, data]]:
constant[Create alias address]
return[call[name[self].api_call, parameter[call[call[name[ENDPOINTS]][constant[aliases]]][constant[new]], call[name[dict], parameter[]]]]]
|
keyword[def] identifier[create_alias] ( identifier[self] , identifier[userid] , identifier[data] ):
literal[string]
keyword[return] identifier[self] . identifier[api_call] (
identifier[ENDPOINTS] [ literal[string] ][ literal[string] ],
identifier[dict] ( identifier[userid] = identifier[userid] ),
identifier[body] = identifier[data] )
|
def create_alias(self, userid, data):
"""Create alias address"""
return self.api_call(ENDPOINTS['aliases']['new'], dict(userid=userid), body=data)
|
def load_plugin():
"""Adds EV3 buttons on toolbar and commands under Run and Tools menu. Add EV3 configuration window."""
# Add EV3 configuration window
workbench=get_workbench()
workbench.set_default("ev3.ip", "192.168.0.1")
workbench.set_default("ev3.username", "robot")
workbench.set_default("ev3.password", "maker")
workbench.add_configuration_page("EV3", Ev3ConfigurationPage)
# icons
image_path_remoterun = os.path.join(os.path.dirname(__file__), "res", "remoterun.gif")
image_path_remotedebug = os.path.join(os.path.dirname(__file__), "res", "remotedebug.gif")
image_path_upload = os.path.join(os.path.dirname(__file__), "res", "up.gif")
image_path_run = os.path.join(os.path.dirname(__file__), "res", "flash.gif")
image_path_log = os.path.join(os.path.dirname(__file__), "res", "log.gif")
image_path_clean = os.path.join(os.path.dirname(__file__), "res", "clean.gif")
# menu buttons
get_workbench().add_command("ev3remoterun", "run", "Run current script using the EV3 API in remote control mode" ,
get_button_handler_for_magiccmd_on_current_file("Ev3RemoteRun"),
currentscript_and_command_enabled,
default_sequence="<F9>",
group=20,
image_filename=image_path_remoterun,
include_in_toolbar=True)
get_workbench().add_command("ev3remotedebug", "run", "Debug current script using the EV3 API in remote control mode" ,
get_button_handler_for_magiccmd_on_current_file("Ev3RemoteDebug"),
currentscript_and_command_enabled,
default_sequence="<Control-F9>",
group=20,
image_filename=image_path_remotedebug,
include_in_toolbar=True)
get_workbench().add_command("ev3patch", "tools", "Install ev3dev additions to the ev3dev sdcard on the EV3",
patch_ev3,
command_enabled,
default_sequence=None,
group=270,
#image_filename=image_path_upload,
include_in_toolbar=False)
get_workbench().add_command("ev3softreset", "tools", "Soft reset the EV3 (stop programs,rpyc started sound/motors,restart brickman and rpycd service)",
soft_reset_ev3,
command_enabled,
default_sequence=None,
group=275,
#image_filename=image_path_clean,
include_in_toolbar=False)
get_workbench().add_command("ev3upload", "tools", "Upload current script to EV3",
upload_current_script,
currentscript_and_command_enabled,
default_sequence="<F10>",
group=280,
image_filename=image_path_upload,
include_in_toolbar=True)
get_workbench().add_command("ev3run", "tools", "Start current script on the EV3",
start_current_script,
currentscript_and_command_enabled,
default_sequence="<Control-F10>",
group=280,
image_filename=image_path_run,
include_in_toolbar=True)
get_workbench().add_command("ev3log", "tools", "Download log of current script from EV3",
download_log_of_current_script,
currentscript_and_command_enabled,
default_sequence=None,
group=280,
image_filename=image_path_log,
include_in_toolbar=True)
get_workbench().add_command("ev3clean", "tools", "Cleanup EV3 by deleting all files stored in homedir on EV3",
cleanup_files_on_ev3,
command_enabled,
default_sequence=None,
group=290,
image_filename=image_path_clean,
include_in_toolbar=True)
orig_interrupt_backend=get_runner().interrupt_backend
def wrapped_interrupt_backend():
# kill program on pc
orig_interrupt_backend()
# stop programmings running on ev3 and stop sound/motors via rpyc
stop_ev3_programs__and__rpyc_motors_sound()
get_runner().interrupt_backend = wrapped_interrupt_backend
# magic commands
shell = get_workbench().get_view("ShellView")
shell.add_command("Ev3RemoteRun", _handle_rundebug_from_shell)
shell.add_command("Ev3RemoteDebug", _handle_rundebug_from_shell)
shell.add_command("Reset", _handle_reset_from_shell)
shell.add_command("pwd", _handle_pwd_from_shell)
shell.add_command("cd", _handle_cd_from_shell)
shell.add_command("ls", _handle_ls_from_shell)
shell.add_command("help", _handle_help_from_shell)
shell.add_command("open", _handle_open_from_shell)
shell.add_command("reload", _handle_reload_from_shell)
|
def function[load_plugin, parameter[]]:
constant[Adds EV3 buttons on toolbar and commands under Run and Tools menu. Add EV3 configuration window.]
variable[workbench] assign[=] call[name[get_workbench], parameter[]]
call[name[workbench].set_default, parameter[constant[ev3.ip], constant[192.168.0.1]]]
call[name[workbench].set_default, parameter[constant[ev3.username], constant[robot]]]
call[name[workbench].set_default, parameter[constant[ev3.password], constant[maker]]]
call[name[workbench].add_configuration_page, parameter[constant[EV3], name[Ev3ConfigurationPage]]]
variable[image_path_remoterun] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[res], constant[remoterun.gif]]]
variable[image_path_remotedebug] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[res], constant[remotedebug.gif]]]
variable[image_path_upload] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[res], constant[up.gif]]]
variable[image_path_run] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[res], constant[flash.gif]]]
variable[image_path_log] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[res], constant[log.gif]]]
variable[image_path_clean] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], constant[res], constant[clean.gif]]]
call[call[name[get_workbench], parameter[]].add_command, parameter[constant[ev3remoterun], constant[run], constant[Run current script using the EV3 API in remote control mode], call[name[get_button_handler_for_magiccmd_on_current_file], parameter[constant[Ev3RemoteRun]]], name[currentscript_and_command_enabled]]]
call[call[name[get_workbench], parameter[]].add_command, parameter[constant[ev3remotedebug], constant[run], constant[Debug current script using the EV3 API in remote control mode], call[name[get_button_handler_for_magiccmd_on_current_file], parameter[constant[Ev3RemoteDebug]]], name[currentscript_and_command_enabled]]]
call[call[name[get_workbench], parameter[]].add_command, parameter[constant[ev3patch], constant[tools], constant[Install ev3dev additions to the ev3dev sdcard on the EV3], name[patch_ev3], name[command_enabled]]]
call[call[name[get_workbench], parameter[]].add_command, parameter[constant[ev3softreset], constant[tools], constant[Soft reset the EV3 (stop programs,rpyc started sound/motors,restart brickman and rpycd service)], name[soft_reset_ev3], name[command_enabled]]]
call[call[name[get_workbench], parameter[]].add_command, parameter[constant[ev3upload], constant[tools], constant[Upload current script to EV3], name[upload_current_script], name[currentscript_and_command_enabled]]]
call[call[name[get_workbench], parameter[]].add_command, parameter[constant[ev3run], constant[tools], constant[Start current script on the EV3], name[start_current_script], name[currentscript_and_command_enabled]]]
call[call[name[get_workbench], parameter[]].add_command, parameter[constant[ev3log], constant[tools], constant[Download log of current script from EV3], name[download_log_of_current_script], name[currentscript_and_command_enabled]]]
call[call[name[get_workbench], parameter[]].add_command, parameter[constant[ev3clean], constant[tools], constant[Cleanup EV3 by deleting all files stored in homedir on EV3], name[cleanup_files_on_ev3], name[command_enabled]]]
variable[orig_interrupt_backend] assign[=] call[name[get_runner], parameter[]].interrupt_backend
def function[wrapped_interrupt_backend, parameter[]]:
call[name[orig_interrupt_backend], parameter[]]
call[name[stop_ev3_programs__and__rpyc_motors_sound], parameter[]]
call[name[get_runner], parameter[]].interrupt_backend assign[=] name[wrapped_interrupt_backend]
variable[shell] assign[=] call[call[name[get_workbench], parameter[]].get_view, parameter[constant[ShellView]]]
call[name[shell].add_command, parameter[constant[Ev3RemoteRun], name[_handle_rundebug_from_shell]]]
call[name[shell].add_command, parameter[constant[Ev3RemoteDebug], name[_handle_rundebug_from_shell]]]
call[name[shell].add_command, parameter[constant[Reset], name[_handle_reset_from_shell]]]
call[name[shell].add_command, parameter[constant[pwd], name[_handle_pwd_from_shell]]]
call[name[shell].add_command, parameter[constant[cd], name[_handle_cd_from_shell]]]
call[name[shell].add_command, parameter[constant[ls], name[_handle_ls_from_shell]]]
call[name[shell].add_command, parameter[constant[help], name[_handle_help_from_shell]]]
call[name[shell].add_command, parameter[constant[open], name[_handle_open_from_shell]]]
call[name[shell].add_command, parameter[constant[reload], name[_handle_reload_from_shell]]]
|
keyword[def] identifier[load_plugin] ():
literal[string]
identifier[workbench] = identifier[get_workbench] ()
identifier[workbench] . identifier[set_default] ( literal[string] , literal[string] )
identifier[workbench] . identifier[set_default] ( literal[string] , literal[string] )
identifier[workbench] . identifier[set_default] ( literal[string] , literal[string] )
identifier[workbench] . identifier[add_configuration_page] ( literal[string] , identifier[Ev3ConfigurationPage] )
identifier[image_path_remoterun] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] , literal[string] )
identifier[image_path_remotedebug] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] , literal[string] )
identifier[image_path_upload] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] , literal[string] )
identifier[image_path_run] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] , literal[string] )
identifier[image_path_log] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] , literal[string] )
identifier[image_path_clean] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), literal[string] , literal[string] )
identifier[get_workbench] (). identifier[add_command] ( literal[string] , literal[string] , literal[string] ,
identifier[get_button_handler_for_magiccmd_on_current_file] ( literal[string] ),
identifier[currentscript_and_command_enabled] ,
identifier[default_sequence] = literal[string] ,
identifier[group] = literal[int] ,
identifier[image_filename] = identifier[image_path_remoterun] ,
identifier[include_in_toolbar] = keyword[True] )
identifier[get_workbench] (). identifier[add_command] ( literal[string] , literal[string] , literal[string] ,
identifier[get_button_handler_for_magiccmd_on_current_file] ( literal[string] ),
identifier[currentscript_and_command_enabled] ,
identifier[default_sequence] = literal[string] ,
identifier[group] = literal[int] ,
identifier[image_filename] = identifier[image_path_remotedebug] ,
identifier[include_in_toolbar] = keyword[True] )
identifier[get_workbench] (). identifier[add_command] ( literal[string] , literal[string] , literal[string] ,
identifier[patch_ev3] ,
identifier[command_enabled] ,
identifier[default_sequence] = keyword[None] ,
identifier[group] = literal[int] ,
identifier[include_in_toolbar] = keyword[False] )
identifier[get_workbench] (). identifier[add_command] ( literal[string] , literal[string] , literal[string] ,
identifier[soft_reset_ev3] ,
identifier[command_enabled] ,
identifier[default_sequence] = keyword[None] ,
identifier[group] = literal[int] ,
identifier[include_in_toolbar] = keyword[False] )
identifier[get_workbench] (). identifier[add_command] ( literal[string] , literal[string] , literal[string] ,
identifier[upload_current_script] ,
identifier[currentscript_and_command_enabled] ,
identifier[default_sequence] = literal[string] ,
identifier[group] = literal[int] ,
identifier[image_filename] = identifier[image_path_upload] ,
identifier[include_in_toolbar] = keyword[True] )
identifier[get_workbench] (). identifier[add_command] ( literal[string] , literal[string] , literal[string] ,
identifier[start_current_script] ,
identifier[currentscript_and_command_enabled] ,
identifier[default_sequence] = literal[string] ,
identifier[group] = literal[int] ,
identifier[image_filename] = identifier[image_path_run] ,
identifier[include_in_toolbar] = keyword[True] )
identifier[get_workbench] (). identifier[add_command] ( literal[string] , literal[string] , literal[string] ,
identifier[download_log_of_current_script] ,
identifier[currentscript_and_command_enabled] ,
identifier[default_sequence] = keyword[None] ,
identifier[group] = literal[int] ,
identifier[image_filename] = identifier[image_path_log] ,
identifier[include_in_toolbar] = keyword[True] )
identifier[get_workbench] (). identifier[add_command] ( literal[string] , literal[string] , literal[string] ,
identifier[cleanup_files_on_ev3] ,
identifier[command_enabled] ,
identifier[default_sequence] = keyword[None] ,
identifier[group] = literal[int] ,
identifier[image_filename] = identifier[image_path_clean] ,
identifier[include_in_toolbar] = keyword[True] )
identifier[orig_interrupt_backend] = identifier[get_runner] (). identifier[interrupt_backend]
keyword[def] identifier[wrapped_interrupt_backend] ():
identifier[orig_interrupt_backend] ()
identifier[stop_ev3_programs__and__rpyc_motors_sound] ()
identifier[get_runner] (). identifier[interrupt_backend] = identifier[wrapped_interrupt_backend]
identifier[shell] = identifier[get_workbench] (). identifier[get_view] ( literal[string] )
identifier[shell] . identifier[add_command] ( literal[string] , identifier[_handle_rundebug_from_shell] )
identifier[shell] . identifier[add_command] ( literal[string] , identifier[_handle_rundebug_from_shell] )
identifier[shell] . identifier[add_command] ( literal[string] , identifier[_handle_reset_from_shell] )
identifier[shell] . identifier[add_command] ( literal[string] , identifier[_handle_pwd_from_shell] )
identifier[shell] . identifier[add_command] ( literal[string] , identifier[_handle_cd_from_shell] )
identifier[shell] . identifier[add_command] ( literal[string] , identifier[_handle_ls_from_shell] )
identifier[shell] . identifier[add_command] ( literal[string] , identifier[_handle_help_from_shell] )
identifier[shell] . identifier[add_command] ( literal[string] , identifier[_handle_open_from_shell] )
identifier[shell] . identifier[add_command] ( literal[string] , identifier[_handle_reload_from_shell] )
|
def load_plugin():
"""Adds EV3 buttons on toolbar and commands under Run and Tools menu. Add EV3 configuration window."""
# Add EV3 configuration window
workbench = get_workbench()
workbench.set_default('ev3.ip', '192.168.0.1')
workbench.set_default('ev3.username', 'robot')
workbench.set_default('ev3.password', 'maker')
workbench.add_configuration_page('EV3', Ev3ConfigurationPage)
# icons
image_path_remoterun = os.path.join(os.path.dirname(__file__), 'res', 'remoterun.gif')
image_path_remotedebug = os.path.join(os.path.dirname(__file__), 'res', 'remotedebug.gif')
image_path_upload = os.path.join(os.path.dirname(__file__), 'res', 'up.gif')
image_path_run = os.path.join(os.path.dirname(__file__), 'res', 'flash.gif')
image_path_log = os.path.join(os.path.dirname(__file__), 'res', 'log.gif')
image_path_clean = os.path.join(os.path.dirname(__file__), 'res', 'clean.gif')
# menu buttons
get_workbench().add_command('ev3remoterun', 'run', 'Run current script using the EV3 API in remote control mode', get_button_handler_for_magiccmd_on_current_file('Ev3RemoteRun'), currentscript_and_command_enabled, default_sequence='<F9>', group=20, image_filename=image_path_remoterun, include_in_toolbar=True)
get_workbench().add_command('ev3remotedebug', 'run', 'Debug current script using the EV3 API in remote control mode', get_button_handler_for_magiccmd_on_current_file('Ev3RemoteDebug'), currentscript_and_command_enabled, default_sequence='<Control-F9>', group=20, image_filename=image_path_remotedebug, include_in_toolbar=True)
#image_filename=image_path_upload,
get_workbench().add_command('ev3patch', 'tools', 'Install ev3dev additions to the ev3dev sdcard on the EV3', patch_ev3, command_enabled, default_sequence=None, group=270, include_in_toolbar=False)
#image_filename=image_path_clean,
get_workbench().add_command('ev3softreset', 'tools', 'Soft reset the EV3 (stop programs,rpyc started sound/motors,restart brickman and rpycd service)', soft_reset_ev3, command_enabled, default_sequence=None, group=275, include_in_toolbar=False)
get_workbench().add_command('ev3upload', 'tools', 'Upload current script to EV3', upload_current_script, currentscript_and_command_enabled, default_sequence='<F10>', group=280, image_filename=image_path_upload, include_in_toolbar=True)
get_workbench().add_command('ev3run', 'tools', 'Start current script on the EV3', start_current_script, currentscript_and_command_enabled, default_sequence='<Control-F10>', group=280, image_filename=image_path_run, include_in_toolbar=True)
get_workbench().add_command('ev3log', 'tools', 'Download log of current script from EV3', download_log_of_current_script, currentscript_and_command_enabled, default_sequence=None, group=280, image_filename=image_path_log, include_in_toolbar=True)
get_workbench().add_command('ev3clean', 'tools', 'Cleanup EV3 by deleting all files stored in homedir on EV3', cleanup_files_on_ev3, command_enabled, default_sequence=None, group=290, image_filename=image_path_clean, include_in_toolbar=True)
orig_interrupt_backend = get_runner().interrupt_backend
def wrapped_interrupt_backend():
# kill program on pc
orig_interrupt_backend()
# stop programmings running on ev3 and stop sound/motors via rpyc
stop_ev3_programs__and__rpyc_motors_sound()
get_runner().interrupt_backend = wrapped_interrupt_backend
# magic commands
shell = get_workbench().get_view('ShellView')
shell.add_command('Ev3RemoteRun', _handle_rundebug_from_shell)
shell.add_command('Ev3RemoteDebug', _handle_rundebug_from_shell)
shell.add_command('Reset', _handle_reset_from_shell)
shell.add_command('pwd', _handle_pwd_from_shell)
shell.add_command('cd', _handle_cd_from_shell)
shell.add_command('ls', _handle_ls_from_shell)
shell.add_command('help', _handle_help_from_shell)
shell.add_command('open', _handle_open_from_shell)
shell.add_command('reload', _handle_reload_from_shell)
|
def parse_logs(log_list, date, machine_name, log_type):
"""
Parse log file lines in log_type format.
"""
output = []
count = fail = skip = updated = 0
# Check things are setup correctly
try:
machine = Machine.objects.get(name=machine_name)
except Machine.DoesNotExist:
return "ERROR: Couldn't find machine named: %s" % machine_name
if log_type == "alogger":
parser = AloggerParser()
else:
parser = get_parser(log_type)
# Process each line
for line_no, line in enumerate(log_list):
try:
data = parser.line_to_dict(line)
except ValueError:
output.append("%d: Error reading line" % line_no)
continue
# if parser returns None, nothing to do, continue to next line
if data is None:
skip += 1
continue
# check for required fields
required_fields = [
'user', 'project', 'jobid', 'jobname',
'cpu_usage', 'cores',
'act_wall_time', 'est_wall_time',
'mem', 'vmem', 'list_pmem', 'list_mem', 'list_pvmem',
'ctime', 'qtime', 'etime', 'start',
]
for field in required_fields:
if field not in data:
output.append(
"line %d: %s field not given." % (line_no, field))
fail = fail + 1
continue
# Process user --> account
try:
account = Account.objects.get(
username=data['user'],
date_deleted__isnull=True)
except Account.DoesNotExist:
# Couldn't find user account - Assign to user None
output.append(
"line %d: Couldn't find user account for username=%s."
% (line_no, data['user']))
fail += 1
continue
except Account.MultipleObjectsReturned:
output.append(
"line %d: Username %s has multiple active accounts."
% (line_no, data['user']))
fail += 1
continue
# Process project
if data['project'] is None:
output.append(
"line %d: Project was not supplied." % (line_no))
fail += 1
continue
try:
project = Project.objects.get(pid=data['project'])
except Project.DoesNotExist:
output.append(
"line %d: Couldn't find specified project %s"
% (line_no, data['project']))
fail += 1
continue
# memory calculations
if machine.mem_per_core:
avail_mem_per_core = machine.mem_per_core * 1024
avail_mem_for_job = avail_mem_per_core * data['cores']
if data['list_pmem'] * data['cores'] > data['list_mem']:
memory_used_per_core = data['list_pmem']
memory_used_for_job = data['list_pmem'] * data['cores']
else:
memory_used_per_core = data['list_mem'] / data['cores']
memory_used_for_job = data['list_mem']
if memory_used_for_job > avail_mem_for_job:
data['cpu_usage'] = ceil(
memory_used_per_core / avail_mem_per_core
* data['act_wall_time']
* data['cores'])
# apply scaling factor to cpu_usage
data['cpu_usage'] = data['cpu_usage'] * machine.scaling_factor
# Everything is good so add entry
queue, created = Queue.objects.get_or_create(name=data['queue'])
try:
cpujob, created = CPUJob.objects.get_or_create(jobid=data['jobid'])
cpujob.account = account
cpujob.username = data['user']
cpujob.project = project
cpujob.machine = machine
cpujob.date = date
cpujob.queue = queue
cpujob.cpu_usage = data['cpu_usage']
cpujob.est_wall_time = data['est_wall_time']
cpujob.act_wall_time = data['act_wall_time']
cpujob.mem = data['mem']
cpujob.vmem = data['vmem']
cpujob.ctime = data['ctime']
cpujob.qtime = data['qtime']
cpujob.etime = data['etime']
cpujob.start = data['start']
cpujob.cores = data['cores']
cpujob.exit_status = data['exit_status']
cpujob.jobname = data['jobname']
cpujob.list_mem = data['list_mem']
cpujob.list_vmem = data['list_vmem']
cpujob.list_pmem = data['list_pmem']
cpujob.list_pvmem = data['list_pvmem']
cpujob.save()
except Exception as e:
output.append(
"line %d: Failed to insert a line - %s" % (line_no, e))
fail += 1
continue
if created:
count += 1
else:
updated += 1
summary = (
'Inserted : %i\nUpdated : %i\nFailed : %i\nSkiped : %i'
% (count, updated, fail, skip)
)
logger.debug('Inserted : %i' % count)
logger.debug('Updated : %i' % updated)
logger.debug('Failed : %i' % fail)
logger.debug('Skiped : %i' % skip)
return summary, output
|
def function[parse_logs, parameter[log_list, date, machine_name, log_type]]:
constant[
Parse log file lines in log_type format.
]
variable[output] assign[=] list[[]]
variable[count] assign[=] constant[0]
<ast.Try object at 0x7da20c795660>
if compare[name[log_type] equal[==] constant[alogger]] begin[:]
variable[parser] assign[=] call[name[AloggerParser], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c7961a0>, <ast.Name object at 0x7da20c796440>]]] in starred[call[name[enumerate], parameter[name[log_list]]]] begin[:]
<ast.Try object at 0x7da20c7943d0>
if compare[name[data] is constant[None]] begin[:]
<ast.AugAssign object at 0x7da20c7950c0>
continue
variable[required_fields] assign[=] list[[<ast.Constant object at 0x7da20c795420>, <ast.Constant object at 0x7da20c795c00>, <ast.Constant object at 0x7da20c7956c0>, <ast.Constant object at 0x7da20c795210>, <ast.Constant object at 0x7da20c7964d0>, <ast.Constant object at 0x7da20c795fc0>, <ast.Constant object at 0x7da20c7952a0>, <ast.Constant object at 0x7da20c795e10>, <ast.Constant object at 0x7da20c794c70>, <ast.Constant object at 0x7da20c796200>, <ast.Constant object at 0x7da20c794880>, <ast.Constant object at 0x7da20c794550>, <ast.Constant object at 0x7da20c796290>, <ast.Constant object at 0x7da20c796080>, <ast.Constant object at 0x7da20c794a60>, <ast.Constant object at 0x7da20c794eb0>, <ast.Constant object at 0x7da20c7954b0>]]
for taget[name[field]] in starred[name[required_fields]] begin[:]
if compare[name[field] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:]
call[name[output].append, parameter[binary_operation[constant[line %d: %s field not given.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b05f9030>, <ast.Name object at 0x7da1b05f8f40>]]]]]
variable[fail] assign[=] binary_operation[name[fail] + constant[1]]
continue
<ast.Try object at 0x7da1b05f9300>
if compare[call[name[data]][constant[project]] is constant[None]] begin[:]
call[name[output].append, parameter[binary_operation[constant[line %d: Project was not supplied.] <ast.Mod object at 0x7da2590d6920> name[line_no]]]]
<ast.AugAssign object at 0x7da1b05f8cd0>
continue
<ast.Try object at 0x7da1b05fa9e0>
if name[machine].mem_per_core begin[:]
variable[avail_mem_per_core] assign[=] binary_operation[name[machine].mem_per_core * constant[1024]]
variable[avail_mem_for_job] assign[=] binary_operation[name[avail_mem_per_core] * call[name[data]][constant[cores]]]
if compare[binary_operation[call[name[data]][constant[list_pmem]] * call[name[data]][constant[cores]]] greater[>] call[name[data]][constant[list_mem]]] begin[:]
variable[memory_used_per_core] assign[=] call[name[data]][constant[list_pmem]]
variable[memory_used_for_job] assign[=] binary_operation[call[name[data]][constant[list_pmem]] * call[name[data]][constant[cores]]]
if compare[name[memory_used_for_job] greater[>] name[avail_mem_for_job]] begin[:]
call[name[data]][constant[cpu_usage]] assign[=] call[name[ceil], parameter[binary_operation[binary_operation[binary_operation[name[memory_used_per_core] / name[avail_mem_per_core]] * call[name[data]][constant[act_wall_time]]] * call[name[data]][constant[cores]]]]]
call[name[data]][constant[cpu_usage]] assign[=] binary_operation[call[name[data]][constant[cpu_usage]] * name[machine].scaling_factor]
<ast.Tuple object at 0x7da1b05fa650> assign[=] call[name[Queue].objects.get_or_create, parameter[]]
<ast.Try object at 0x7da1b05f9a50>
if name[created] begin[:]
<ast.AugAssign object at 0x7da1b0336020>
variable[summary] assign[=] binary_operation[constant[Inserted : %i
Updated : %i
Failed : %i
Skiped : %i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0336c20>, <ast.Name object at 0x7da1b0336a40>, <ast.Name object at 0x7da1b03372b0>, <ast.Name object at 0x7da1b0334fa0>]]]
call[name[logger].debug, parameter[binary_operation[constant[Inserted : %i] <ast.Mod object at 0x7da2590d6920> name[count]]]]
call[name[logger].debug, parameter[binary_operation[constant[Updated : %i] <ast.Mod object at 0x7da2590d6920> name[updated]]]]
call[name[logger].debug, parameter[binary_operation[constant[Failed : %i] <ast.Mod object at 0x7da2590d6920> name[fail]]]]
call[name[logger].debug, parameter[binary_operation[constant[Skiped : %i] <ast.Mod object at 0x7da2590d6920> name[skip]]]]
return[tuple[[<ast.Name object at 0x7da1b0240a00>, <ast.Name object at 0x7da1b0240f40>]]]
|
keyword[def] identifier[parse_logs] ( identifier[log_list] , identifier[date] , identifier[machine_name] , identifier[log_type] ):
literal[string]
identifier[output] =[]
identifier[count] = identifier[fail] = identifier[skip] = identifier[updated] = literal[int]
keyword[try] :
identifier[machine] = identifier[Machine] . identifier[objects] . identifier[get] ( identifier[name] = identifier[machine_name] )
keyword[except] identifier[Machine] . identifier[DoesNotExist] :
keyword[return] literal[string] % identifier[machine_name]
keyword[if] identifier[log_type] == literal[string] :
identifier[parser] = identifier[AloggerParser] ()
keyword[else] :
identifier[parser] = identifier[get_parser] ( identifier[log_type] )
keyword[for] identifier[line_no] , identifier[line] keyword[in] identifier[enumerate] ( identifier[log_list] ):
keyword[try] :
identifier[data] = identifier[parser] . identifier[line_to_dict] ( identifier[line] )
keyword[except] identifier[ValueError] :
identifier[output] . identifier[append] ( literal[string] % identifier[line_no] )
keyword[continue]
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[skip] += literal[int]
keyword[continue]
identifier[required_fields] =[
literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] ,
]
keyword[for] identifier[field] keyword[in] identifier[required_fields] :
keyword[if] identifier[field] keyword[not] keyword[in] identifier[data] :
identifier[output] . identifier[append] (
literal[string] %( identifier[line_no] , identifier[field] ))
identifier[fail] = identifier[fail] + literal[int]
keyword[continue]
keyword[try] :
identifier[account] = identifier[Account] . identifier[objects] . identifier[get] (
identifier[username] = identifier[data] [ literal[string] ],
identifier[date_deleted__isnull] = keyword[True] )
keyword[except] identifier[Account] . identifier[DoesNotExist] :
identifier[output] . identifier[append] (
literal[string]
%( identifier[line_no] , identifier[data] [ literal[string] ]))
identifier[fail] += literal[int]
keyword[continue]
keyword[except] identifier[Account] . identifier[MultipleObjectsReturned] :
identifier[output] . identifier[append] (
literal[string]
%( identifier[line_no] , identifier[data] [ literal[string] ]))
identifier[fail] += literal[int]
keyword[continue]
keyword[if] identifier[data] [ literal[string] ] keyword[is] keyword[None] :
identifier[output] . identifier[append] (
literal[string] %( identifier[line_no] ))
identifier[fail] += literal[int]
keyword[continue]
keyword[try] :
identifier[project] = identifier[Project] . identifier[objects] . identifier[get] ( identifier[pid] = identifier[data] [ literal[string] ])
keyword[except] identifier[Project] . identifier[DoesNotExist] :
identifier[output] . identifier[append] (
literal[string]
%( identifier[line_no] , identifier[data] [ literal[string] ]))
identifier[fail] += literal[int]
keyword[continue]
keyword[if] identifier[machine] . identifier[mem_per_core] :
identifier[avail_mem_per_core] = identifier[machine] . identifier[mem_per_core] * literal[int]
identifier[avail_mem_for_job] = identifier[avail_mem_per_core] * identifier[data] [ literal[string] ]
keyword[if] identifier[data] [ literal[string] ]* identifier[data] [ literal[string] ]> identifier[data] [ literal[string] ]:
identifier[memory_used_per_core] = identifier[data] [ literal[string] ]
identifier[memory_used_for_job] = identifier[data] [ literal[string] ]* identifier[data] [ literal[string] ]
keyword[else] :
identifier[memory_used_per_core] = identifier[data] [ literal[string] ]/ identifier[data] [ literal[string] ]
identifier[memory_used_for_job] = identifier[data] [ literal[string] ]
keyword[if] identifier[memory_used_for_job] > identifier[avail_mem_for_job] :
identifier[data] [ literal[string] ]= identifier[ceil] (
identifier[memory_used_per_core] / identifier[avail_mem_per_core]
* identifier[data] [ literal[string] ]
* identifier[data] [ literal[string] ])
identifier[data] [ literal[string] ]= identifier[data] [ literal[string] ]* identifier[machine] . identifier[scaling_factor]
identifier[queue] , identifier[created] = identifier[Queue] . identifier[objects] . identifier[get_or_create] ( identifier[name] = identifier[data] [ literal[string] ])
keyword[try] :
identifier[cpujob] , identifier[created] = identifier[CPUJob] . identifier[objects] . identifier[get_or_create] ( identifier[jobid] = identifier[data] [ literal[string] ])
identifier[cpujob] . identifier[account] = identifier[account]
identifier[cpujob] . identifier[username] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[project] = identifier[project]
identifier[cpujob] . identifier[machine] = identifier[machine]
identifier[cpujob] . identifier[date] = identifier[date]
identifier[cpujob] . identifier[queue] = identifier[queue]
identifier[cpujob] . identifier[cpu_usage] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[est_wall_time] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[act_wall_time] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[mem] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[vmem] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[ctime] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[qtime] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[etime] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[start] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[cores] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[exit_status] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[jobname] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[list_mem] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[list_vmem] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[list_pmem] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[list_pvmem] = identifier[data] [ literal[string] ]
identifier[cpujob] . identifier[save] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[output] . identifier[append] (
literal[string] %( identifier[line_no] , identifier[e] ))
identifier[fail] += literal[int]
keyword[continue]
keyword[if] identifier[created] :
identifier[count] += literal[int]
keyword[else] :
identifier[updated] += literal[int]
identifier[summary] =(
literal[string]
%( identifier[count] , identifier[updated] , identifier[fail] , identifier[skip] )
)
identifier[logger] . identifier[debug] ( literal[string] % identifier[count] )
identifier[logger] . identifier[debug] ( literal[string] % identifier[updated] )
identifier[logger] . identifier[debug] ( literal[string] % identifier[fail] )
identifier[logger] . identifier[debug] ( literal[string] % identifier[skip] )
keyword[return] identifier[summary] , identifier[output]
|
def parse_logs(log_list, date, machine_name, log_type):
"""
Parse log file lines in log_type format.
"""
output = []
count = fail = skip = updated = 0
# Check things are setup correctly
try:
machine = Machine.objects.get(name=machine_name) # depends on [control=['try'], data=[]]
except Machine.DoesNotExist:
return "ERROR: Couldn't find machine named: %s" % machine_name # depends on [control=['except'], data=[]]
if log_type == 'alogger':
parser = AloggerParser() # depends on [control=['if'], data=[]]
else:
parser = get_parser(log_type)
# Process each line
for (line_no, line) in enumerate(log_list):
try:
data = parser.line_to_dict(line) # depends on [control=['try'], data=[]]
except ValueError:
output.append('%d: Error reading line' % line_no)
continue # depends on [control=['except'], data=[]]
# if parser returns None, nothing to do, continue to next line
if data is None:
skip += 1
continue # depends on [control=['if'], data=[]]
# check for required fields
required_fields = ['user', 'project', 'jobid', 'jobname', 'cpu_usage', 'cores', 'act_wall_time', 'est_wall_time', 'mem', 'vmem', 'list_pmem', 'list_mem', 'list_pvmem', 'ctime', 'qtime', 'etime', 'start']
for field in required_fields:
if field not in data:
output.append('line %d: %s field not given.' % (line_no, field))
fail = fail + 1
continue # depends on [control=['if'], data=['field']] # depends on [control=['for'], data=['field']]
# Process user --> account
try:
account = Account.objects.get(username=data['user'], date_deleted__isnull=True) # depends on [control=['try'], data=[]]
except Account.DoesNotExist:
# Couldn't find user account - Assign to user None
output.append("line %d: Couldn't find user account for username=%s." % (line_no, data['user']))
fail += 1
continue # depends on [control=['except'], data=[]]
except Account.MultipleObjectsReturned:
output.append('line %d: Username %s has multiple active accounts.' % (line_no, data['user']))
fail += 1
continue # depends on [control=['except'], data=[]]
# Process project
if data['project'] is None:
output.append('line %d: Project was not supplied.' % line_no)
fail += 1
continue # depends on [control=['if'], data=[]]
try:
project = Project.objects.get(pid=data['project']) # depends on [control=['try'], data=[]]
except Project.DoesNotExist:
output.append("line %d: Couldn't find specified project %s" % (line_no, data['project']))
fail += 1
continue # depends on [control=['except'], data=[]]
# memory calculations
if machine.mem_per_core:
avail_mem_per_core = machine.mem_per_core * 1024
avail_mem_for_job = avail_mem_per_core * data['cores']
if data['list_pmem'] * data['cores'] > data['list_mem']:
memory_used_per_core = data['list_pmem']
memory_used_for_job = data['list_pmem'] * data['cores'] # depends on [control=['if'], data=[]]
else:
memory_used_per_core = data['list_mem'] / data['cores']
memory_used_for_job = data['list_mem']
if memory_used_for_job > avail_mem_for_job:
data['cpu_usage'] = ceil(memory_used_per_core / avail_mem_per_core * data['act_wall_time'] * data['cores']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# apply scaling factor to cpu_usage
data['cpu_usage'] = data['cpu_usage'] * machine.scaling_factor
# Everything is good so add entry
(queue, created) = Queue.objects.get_or_create(name=data['queue'])
try:
(cpujob, created) = CPUJob.objects.get_or_create(jobid=data['jobid'])
cpujob.account = account
cpujob.username = data['user']
cpujob.project = project
cpujob.machine = machine
cpujob.date = date
cpujob.queue = queue
cpujob.cpu_usage = data['cpu_usage']
cpujob.est_wall_time = data['est_wall_time']
cpujob.act_wall_time = data['act_wall_time']
cpujob.mem = data['mem']
cpujob.vmem = data['vmem']
cpujob.ctime = data['ctime']
cpujob.qtime = data['qtime']
cpujob.etime = data['etime']
cpujob.start = data['start']
cpujob.cores = data['cores']
cpujob.exit_status = data['exit_status']
cpujob.jobname = data['jobname']
cpujob.list_mem = data['list_mem']
cpujob.list_vmem = data['list_vmem']
cpujob.list_pmem = data['list_pmem']
cpujob.list_pvmem = data['list_pvmem']
cpujob.save() # depends on [control=['try'], data=[]]
except Exception as e:
output.append('line %d: Failed to insert a line - %s' % (line_no, e))
fail += 1
continue # depends on [control=['except'], data=['e']]
if created:
count += 1 # depends on [control=['if'], data=[]]
else:
updated += 1 # depends on [control=['for'], data=[]]
summary = 'Inserted : %i\nUpdated : %i\nFailed : %i\nSkiped : %i' % (count, updated, fail, skip)
logger.debug('Inserted : %i' % count)
logger.debug('Updated : %i' % updated)
logger.debug('Failed : %i' % fail)
logger.debug('Skiped : %i' % skip)
return (summary, output)
|
def from_csv(cls, path, folder, csv_fname, bs=64, tfms=(None,None),
val_idxs=None, suffix='', test_name=None, continuous=False, skip_header=True, num_workers=8, cat_separator=' '):
""" Read in images and their labels given as a CSV file.
This method should be used when training image labels are given in an CSV file as opposed to
sub-directories with label names.
Arguments:
path: a root path of the data (used for storing trained models, precomputed values, etc)
folder: a name of the folder in which training images are contained.
csv_fname: a name of the CSV file which contains target labels.
bs: batch size
tfms: transformations (for data augmentations). e.g. output of `tfms_from_model`
val_idxs: index of images to be used for validation. e.g. output of `get_cv_idxs`.
If None, default arguments to get_cv_idxs are used.
suffix: suffix to add to image names in CSV file (sometimes CSV only contains the file name without file
extension e.g. '.jpg' - in which case, you can set suffix as '.jpg')
test_name: a name of the folder which contains test images.
continuous: if True, the data set is used to train regression models. If False, it is used
to train classification models.
skip_header: skip the first row of the CSV file.
num_workers: number of workers
cat_separator: Labels category separator
Returns:
ImageClassifierData
"""
assert not (tfms[0] is None or tfms[1] is None), "please provide transformations for your train and validation sets"
assert not (os.path.isabs(folder)), "folder needs to be a relative path"
fnames,y,classes = csv_source(folder, csv_fname, skip_header, suffix, continuous=continuous, cat_separator=cat_separator)
return cls.from_names_and_array(path, fnames, y, classes, val_idxs, test_name,
num_workers=num_workers, suffix=suffix, tfms=tfms, bs=bs, continuous=continuous)
|
def function[from_csv, parameter[cls, path, folder, csv_fname, bs, tfms, val_idxs, suffix, test_name, continuous, skip_header, num_workers, cat_separator]]:
constant[ Read in images and their labels given as a CSV file.
This method should be used when training image labels are given in an CSV file as opposed to
sub-directories with label names.
Arguments:
path: a root path of the data (used for storing trained models, precomputed values, etc)
folder: a name of the folder in which training images are contained.
csv_fname: a name of the CSV file which contains target labels.
bs: batch size
tfms: transformations (for data augmentations). e.g. output of `tfms_from_model`
val_idxs: index of images to be used for validation. e.g. output of `get_cv_idxs`.
If None, default arguments to get_cv_idxs are used.
suffix: suffix to add to image names in CSV file (sometimes CSV only contains the file name without file
extension e.g. '.jpg' - in which case, you can set suffix as '.jpg')
test_name: a name of the folder which contains test images.
continuous: if True, the data set is used to train regression models. If False, it is used
to train classification models.
skip_header: skip the first row of the CSV file.
num_workers: number of workers
cat_separator: Labels category separator
Returns:
ImageClassifierData
]
assert[<ast.UnaryOp object at 0x7da1b202b970>]
assert[<ast.UnaryOp object at 0x7da1b202a4d0>]
<ast.Tuple object at 0x7da1b2029690> assign[=] call[name[csv_source], parameter[name[folder], name[csv_fname], name[skip_header], name[suffix]]]
return[call[name[cls].from_names_and_array, parameter[name[path], name[fnames], name[y], name[classes], name[val_idxs], name[test_name]]]]
|
keyword[def] identifier[from_csv] ( identifier[cls] , identifier[path] , identifier[folder] , identifier[csv_fname] , identifier[bs] = literal[int] , identifier[tfms] =( keyword[None] , keyword[None] ),
identifier[val_idxs] = keyword[None] , identifier[suffix] = literal[string] , identifier[test_name] = keyword[None] , identifier[continuous] = keyword[False] , identifier[skip_header] = keyword[True] , identifier[num_workers] = literal[int] , identifier[cat_separator] = literal[string] ):
literal[string]
keyword[assert] keyword[not] ( identifier[tfms] [ literal[int] ] keyword[is] keyword[None] keyword[or] identifier[tfms] [ literal[int] ] keyword[is] keyword[None] ), literal[string]
keyword[assert] keyword[not] ( identifier[os] . identifier[path] . identifier[isabs] ( identifier[folder] )), literal[string]
identifier[fnames] , identifier[y] , identifier[classes] = identifier[csv_source] ( identifier[folder] , identifier[csv_fname] , identifier[skip_header] , identifier[suffix] , identifier[continuous] = identifier[continuous] , identifier[cat_separator] = identifier[cat_separator] )
keyword[return] identifier[cls] . identifier[from_names_and_array] ( identifier[path] , identifier[fnames] , identifier[y] , identifier[classes] , identifier[val_idxs] , identifier[test_name] ,
identifier[num_workers] = identifier[num_workers] , identifier[suffix] = identifier[suffix] , identifier[tfms] = identifier[tfms] , identifier[bs] = identifier[bs] , identifier[continuous] = identifier[continuous] )
|
def from_csv(cls, path, folder, csv_fname, bs=64, tfms=(None, None), val_idxs=None, suffix='', test_name=None, continuous=False, skip_header=True, num_workers=8, cat_separator=' '):
""" Read in images and their labels given as a CSV file.
This method should be used when training image labels are given in an CSV file as opposed to
sub-directories with label names.
Arguments:
path: a root path of the data (used for storing trained models, precomputed values, etc)
folder: a name of the folder in which training images are contained.
csv_fname: a name of the CSV file which contains target labels.
bs: batch size
tfms: transformations (for data augmentations). e.g. output of `tfms_from_model`
val_idxs: index of images to be used for validation. e.g. output of `get_cv_idxs`.
If None, default arguments to get_cv_idxs are used.
suffix: suffix to add to image names in CSV file (sometimes CSV only contains the file name without file
extension e.g. '.jpg' - in which case, you can set suffix as '.jpg')
test_name: a name of the folder which contains test images.
continuous: if True, the data set is used to train regression models. If False, it is used
to train classification models.
skip_header: skip the first row of the CSV file.
num_workers: number of workers
cat_separator: Labels category separator
Returns:
ImageClassifierData
"""
assert not (tfms[0] is None or tfms[1] is None), 'please provide transformations for your train and validation sets'
assert not os.path.isabs(folder), 'folder needs to be a relative path'
(fnames, y, classes) = csv_source(folder, csv_fname, skip_header, suffix, continuous=continuous, cat_separator=cat_separator)
return cls.from_names_and_array(path, fnames, y, classes, val_idxs, test_name, num_workers=num_workers, suffix=suffix, tfms=tfms, bs=bs, continuous=continuous)
|
def _render_dendrogram(dnd, ax, displacement):
'''Renders dendrogram'''
# set of unique colors that reflect the set of types of the neurites
colors = set()
for n, (indices, ctype) in enumerate(zip(dnd.groups, dnd.types)):
# slice rectangles array for the current neurite
group = dnd.data[indices[0]:indices[1]]
if n > 0:
# displace the neurites by half of their maximum x dimension
# plus half of the previous neurite's maxmimum x dimension
displacement += 0.5 * (dnd.dims[n - 1][0] + dnd.dims[n][0])
# arrange the trees without overlapping with each other
group += (displacement, 0.)
# create the polygonal collection of the dendrogram
# segments
_generate_collection(group, ax, ctype, colors)
soma_square = dnd.soma
if soma_square is not None:
_generate_collection((soma_square + (displacement / 2., 0.),), ax, NeuriteType.soma, colors)
ax.plot((displacement / 2., displacement), (0., 0.), color='k')
ax.plot((0., displacement / 2.), (0., 0.), color='k')
return displacement
|
def function[_render_dendrogram, parameter[dnd, ax, displacement]]:
constant[Renders dendrogram]
variable[colors] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18fe93c40>, <ast.Tuple object at 0x7da18fe93fa0>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[dnd].groups, name[dnd].types]]]]] begin[:]
variable[group] assign[=] call[name[dnd].data][<ast.Slice object at 0x7da18fe910c0>]
if compare[name[n] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da18fe91cf0>
<ast.AugAssign object at 0x7da18fe92470>
call[name[_generate_collection], parameter[name[group], name[ax], name[ctype], name[colors]]]
variable[soma_square] assign[=] name[dnd].soma
if compare[name[soma_square] is_not constant[None]] begin[:]
call[name[_generate_collection], parameter[tuple[[<ast.BinOp object at 0x7da18fe92b90>]], name[ax], name[NeuriteType].soma, name[colors]]]
call[name[ax].plot, parameter[tuple[[<ast.BinOp object at 0x7da18bc70490>, <ast.Name object at 0x7da18bc70eb0>]], tuple[[<ast.Constant object at 0x7da18bc73ac0>, <ast.Constant object at 0x7da18bc70d00>]]]]
call[name[ax].plot, parameter[tuple[[<ast.Constant object at 0x7da18bc71d80>, <ast.BinOp object at 0x7da18bc70550>]], tuple[[<ast.Constant object at 0x7da18bc70700>, <ast.Constant object at 0x7da18bc73d00>]]]]
return[name[displacement]]
|
keyword[def] identifier[_render_dendrogram] ( identifier[dnd] , identifier[ax] , identifier[displacement] ):
literal[string]
identifier[colors] = identifier[set] ()
keyword[for] identifier[n] ,( identifier[indices] , identifier[ctype] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[dnd] . identifier[groups] , identifier[dnd] . identifier[types] )):
identifier[group] = identifier[dnd] . identifier[data] [ identifier[indices] [ literal[int] ]: identifier[indices] [ literal[int] ]]
keyword[if] identifier[n] > literal[int] :
identifier[displacement] += literal[int] *( identifier[dnd] . identifier[dims] [ identifier[n] - literal[int] ][ literal[int] ]+ identifier[dnd] . identifier[dims] [ identifier[n] ][ literal[int] ])
identifier[group] +=( identifier[displacement] , literal[int] )
identifier[_generate_collection] ( identifier[group] , identifier[ax] , identifier[ctype] , identifier[colors] )
identifier[soma_square] = identifier[dnd] . identifier[soma]
keyword[if] identifier[soma_square] keyword[is] keyword[not] keyword[None] :
identifier[_generate_collection] (( identifier[soma_square] +( identifier[displacement] / literal[int] , literal[int] ),), identifier[ax] , identifier[NeuriteType] . identifier[soma] , identifier[colors] )
identifier[ax] . identifier[plot] (( identifier[displacement] / literal[int] , identifier[displacement] ),( literal[int] , literal[int] ), identifier[color] = literal[string] )
identifier[ax] . identifier[plot] (( literal[int] , identifier[displacement] / literal[int] ),( literal[int] , literal[int] ), identifier[color] = literal[string] )
keyword[return] identifier[displacement]
|
def _render_dendrogram(dnd, ax, displacement):
"""Renders dendrogram"""
# set of unique colors that reflect the set of types of the neurites
colors = set()
for (n, (indices, ctype)) in enumerate(zip(dnd.groups, dnd.types)):
# slice rectangles array for the current neurite
group = dnd.data[indices[0]:indices[1]]
if n > 0:
# displace the neurites by half of their maximum x dimension
# plus half of the previous neurite's maxmimum x dimension
displacement += 0.5 * (dnd.dims[n - 1][0] + dnd.dims[n][0]) # depends on [control=['if'], data=['n']]
# arrange the trees without overlapping with each other
group += (displacement, 0.0)
# create the polygonal collection of the dendrogram
# segments
_generate_collection(group, ax, ctype, colors) # depends on [control=['for'], data=[]]
soma_square = dnd.soma
if soma_square is not None:
_generate_collection((soma_square + (displacement / 2.0, 0.0),), ax, NeuriteType.soma, colors)
ax.plot((displacement / 2.0, displacement), (0.0, 0.0), color='k')
ax.plot((0.0, displacement / 2.0), (0.0, 0.0), color='k') # depends on [control=['if'], data=['soma_square']]
return displacement
|
def clean(self, value):
"""Cleans and returns the given value, or raises a ParameterNotValidError exception"""
if isinstance(value, numbers.Number):
return value
elif isinstance(value, six.string_types):
try:
value = float(value)
return int(value) if value.is_integer() else value
except ValueError:
raise ParameterNotValidError
raise ParameterNotValidError
|
def function[clean, parameter[self, value]]:
constant[Cleans and returns the given value, or raises a ParameterNotValidError exception]
if call[name[isinstance], parameter[name[value], name[numbers].Number]] begin[:]
return[name[value]]
<ast.Raise object at 0x7da2054a73a0>
|
keyword[def] identifier[clean] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[numbers] . identifier[Number] ):
keyword[return] identifier[value]
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[string_types] ):
keyword[try] :
identifier[value] = identifier[float] ( identifier[value] )
keyword[return] identifier[int] ( identifier[value] ) keyword[if] identifier[value] . identifier[is_integer] () keyword[else] identifier[value]
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ParameterNotValidError]
keyword[raise] identifier[ParameterNotValidError]
|
def clean(self, value):
"""Cleans and returns the given value, or raises a ParameterNotValidError exception"""
if isinstance(value, numbers.Number):
return value # depends on [control=['if'], data=[]]
elif isinstance(value, six.string_types):
try:
value = float(value)
return int(value) if value.is_integer() else value # depends on [control=['try'], data=[]]
except ValueError:
raise ParameterNotValidError # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
raise ParameterNotValidError
|
def get_rec_column_descr(self, colnum, vstorage):
"""
Get a descriptor entry for the specified column.
parameters
----------
colnum: integer
The column number, 0 offset
vstorage: string
See docs in read_columns
"""
npy_type, isvar, istbit = self._get_tbl_numpy_dtype(colnum)
name = self._info['colinfo'][colnum]['name']
if isvar:
if vstorage == 'object':
descr = (name, 'O')
else:
tform = self._info['colinfo'][colnum]['tform']
max_size = _extract_vararray_max(tform)
if max_size <= 0:
name = self._info['colinfo'][colnum]['name']
mess = 'Will read as an object field'
if max_size < 0:
mess = "Column '%s': No maximum size: '%s'. %s"
mess = mess % (name, tform, mess)
warnings.warn(mess, FITSRuntimeWarning)
else:
mess = "Column '%s': Max size is zero: '%s'. %s"
mess = mess % (name, tform, mess)
warnings.warn(mess, FITSRuntimeWarning)
# we are forced to read this as an object array
return self.get_rec_column_descr(colnum, 'object')
if npy_type[0] == 'S':
# variable length string columns cannot
# themselves be arrays I don't think
npy_type = 'S%d' % max_size
descr = (name, npy_type)
elif npy_type[0] == 'U':
# variable length string columns cannot
# themselves be arrays I don't think
npy_type = 'U%d' % max_size
descr = (name, npy_type)
else:
descr = (name, npy_type, max_size)
else:
tdim = self._info['colinfo'][colnum]['tdim']
shape = _tdim2shape(
tdim, name,
is_string=(npy_type[0] == 'S' or npy_type[0] == 'U'))
if shape is not None:
descr = (name, npy_type, shape)
else:
descr = (name, npy_type)
return descr, isvar
|
def function[get_rec_column_descr, parameter[self, colnum, vstorage]]:
constant[
Get a descriptor entry for the specified column.
parameters
----------
colnum: integer
The column number, 0 offset
vstorage: string
See docs in read_columns
]
<ast.Tuple object at 0x7da18ede6440> assign[=] call[name[self]._get_tbl_numpy_dtype, parameter[name[colnum]]]
variable[name] assign[=] call[call[call[name[self]._info][constant[colinfo]]][name[colnum]]][constant[name]]
if name[isvar] begin[:]
if compare[name[vstorage] equal[==] constant[object]] begin[:]
variable[descr] assign[=] tuple[[<ast.Name object at 0x7da18ede4f70>, <ast.Constant object at 0x7da18ede7220>]]
return[tuple[[<ast.Name object at 0x7da20c7c81f0>, <ast.Name object at 0x7da20c7cac80>]]]
|
keyword[def] identifier[get_rec_column_descr] ( identifier[self] , identifier[colnum] , identifier[vstorage] ):
literal[string]
identifier[npy_type] , identifier[isvar] , identifier[istbit] = identifier[self] . identifier[_get_tbl_numpy_dtype] ( identifier[colnum] )
identifier[name] = identifier[self] . identifier[_info] [ literal[string] ][ identifier[colnum] ][ literal[string] ]
keyword[if] identifier[isvar] :
keyword[if] identifier[vstorage] == literal[string] :
identifier[descr] =( identifier[name] , literal[string] )
keyword[else] :
identifier[tform] = identifier[self] . identifier[_info] [ literal[string] ][ identifier[colnum] ][ literal[string] ]
identifier[max_size] = identifier[_extract_vararray_max] ( identifier[tform] )
keyword[if] identifier[max_size] <= literal[int] :
identifier[name] = identifier[self] . identifier[_info] [ literal[string] ][ identifier[colnum] ][ literal[string] ]
identifier[mess] = literal[string]
keyword[if] identifier[max_size] < literal[int] :
identifier[mess] = literal[string]
identifier[mess] = identifier[mess] %( identifier[name] , identifier[tform] , identifier[mess] )
identifier[warnings] . identifier[warn] ( identifier[mess] , identifier[FITSRuntimeWarning] )
keyword[else] :
identifier[mess] = literal[string]
identifier[mess] = identifier[mess] %( identifier[name] , identifier[tform] , identifier[mess] )
identifier[warnings] . identifier[warn] ( identifier[mess] , identifier[FITSRuntimeWarning] )
keyword[return] identifier[self] . identifier[get_rec_column_descr] ( identifier[colnum] , literal[string] )
keyword[if] identifier[npy_type] [ literal[int] ]== literal[string] :
identifier[npy_type] = literal[string] % identifier[max_size]
identifier[descr] =( identifier[name] , identifier[npy_type] )
keyword[elif] identifier[npy_type] [ literal[int] ]== literal[string] :
identifier[npy_type] = literal[string] % identifier[max_size]
identifier[descr] =( identifier[name] , identifier[npy_type] )
keyword[else] :
identifier[descr] =( identifier[name] , identifier[npy_type] , identifier[max_size] )
keyword[else] :
identifier[tdim] = identifier[self] . identifier[_info] [ literal[string] ][ identifier[colnum] ][ literal[string] ]
identifier[shape] = identifier[_tdim2shape] (
identifier[tdim] , identifier[name] ,
identifier[is_string] =( identifier[npy_type] [ literal[int] ]== literal[string] keyword[or] identifier[npy_type] [ literal[int] ]== literal[string] ))
keyword[if] identifier[shape] keyword[is] keyword[not] keyword[None] :
identifier[descr] =( identifier[name] , identifier[npy_type] , identifier[shape] )
keyword[else] :
identifier[descr] =( identifier[name] , identifier[npy_type] )
keyword[return] identifier[descr] , identifier[isvar]
|
def get_rec_column_descr(self, colnum, vstorage):
"""
Get a descriptor entry for the specified column.
parameters
----------
colnum: integer
The column number, 0 offset
vstorage: string
See docs in read_columns
"""
(npy_type, isvar, istbit) = self._get_tbl_numpy_dtype(colnum)
name = self._info['colinfo'][colnum]['name']
if isvar:
if vstorage == 'object':
descr = (name, 'O') # depends on [control=['if'], data=[]]
else:
tform = self._info['colinfo'][colnum]['tform']
max_size = _extract_vararray_max(tform)
if max_size <= 0:
name = self._info['colinfo'][colnum]['name']
mess = 'Will read as an object field'
if max_size < 0:
mess = "Column '%s': No maximum size: '%s'. %s"
mess = mess % (name, tform, mess)
warnings.warn(mess, FITSRuntimeWarning) # depends on [control=['if'], data=[]]
else:
mess = "Column '%s': Max size is zero: '%s'. %s"
mess = mess % (name, tform, mess)
warnings.warn(mess, FITSRuntimeWarning)
# we are forced to read this as an object array
return self.get_rec_column_descr(colnum, 'object') # depends on [control=['if'], data=['max_size']]
if npy_type[0] == 'S':
# variable length string columns cannot
# themselves be arrays I don't think
npy_type = 'S%d' % max_size
descr = (name, npy_type) # depends on [control=['if'], data=[]]
elif npy_type[0] == 'U':
# variable length string columns cannot
# themselves be arrays I don't think
npy_type = 'U%d' % max_size
descr = (name, npy_type) # depends on [control=['if'], data=[]]
else:
descr = (name, npy_type, max_size) # depends on [control=['if'], data=[]]
else:
tdim = self._info['colinfo'][colnum]['tdim']
shape = _tdim2shape(tdim, name, is_string=npy_type[0] == 'S' or npy_type[0] == 'U')
if shape is not None:
descr = (name, npy_type, shape) # depends on [control=['if'], data=['shape']]
else:
descr = (name, npy_type)
return (descr, isvar)
|
def mdprint(*values, plain=None, **options):
""" Convert HTML to VTML and then print it.
Follows same semantics as vtmlprint. """
print(*[mdrender(x, plain=plain) for x in values], **options)
|
def function[mdprint, parameter[]]:
constant[ Convert HTML to VTML and then print it.
Follows same semantics as vtmlprint. ]
call[name[print], parameter[<ast.Starred object at 0x7da1b143f5e0>]]
|
keyword[def] identifier[mdprint] (* identifier[values] , identifier[plain] = keyword[None] ,** identifier[options] ):
literal[string]
identifier[print] (*[ identifier[mdrender] ( identifier[x] , identifier[plain] = identifier[plain] ) keyword[for] identifier[x] keyword[in] identifier[values] ],** identifier[options] )
|
def mdprint(*values, plain=None, **options):
""" Convert HTML to VTML and then print it.
Follows same semantics as vtmlprint. """
print(*[mdrender(x, plain=plain) for x in values], **options)
|
def take_home_pay(gross_pay, employer_match, taxes_and_fees, numtype='float'):
"""
Calculate net take-home pay including employer retirement savings match
using the formula laid out by Mr. Money Mustache:
http://www.mrmoneymustache.com/2015/01/26/calculating-net-worth/
Args:
gross_pay: float or int, gross monthly pay.
employer_match: float or int, the 401(k) match from your employer.
taxes_and_fees: list, taxes and fees that are deducted from your paycheck.
numtype: string, 'decimal' or 'float'; the type of number to return.
Returns:
your monthly take-home pay.
"""
if numtype == 'decimal':
return (Decimal(gross_pay) + Decimal(employer_match)) - Decimal(
sum(taxes_and_fees)
)
else:
return (float(gross_pay) + float(employer_match)) - sum(taxes_and_fees)
|
def function[take_home_pay, parameter[gross_pay, employer_match, taxes_and_fees, numtype]]:
constant[
Calculate net take-home pay including employer retirement savings match
using the formula laid out by Mr. Money Mustache:
http://www.mrmoneymustache.com/2015/01/26/calculating-net-worth/
Args:
gross_pay: float or int, gross monthly pay.
employer_match: float or int, the 401(k) match from your employer.
taxes_and_fees: list, taxes and fees that are deducted from your paycheck.
numtype: string, 'decimal' or 'float'; the type of number to return.
Returns:
your monthly take-home pay.
]
if compare[name[numtype] equal[==] constant[decimal]] begin[:]
return[binary_operation[binary_operation[call[name[Decimal], parameter[name[gross_pay]]] + call[name[Decimal], parameter[name[employer_match]]]] - call[name[Decimal], parameter[call[name[sum], parameter[name[taxes_and_fees]]]]]]]
|
keyword[def] identifier[take_home_pay] ( identifier[gross_pay] , identifier[employer_match] , identifier[taxes_and_fees] , identifier[numtype] = literal[string] ):
literal[string]
keyword[if] identifier[numtype] == literal[string] :
keyword[return] ( identifier[Decimal] ( identifier[gross_pay] )+ identifier[Decimal] ( identifier[employer_match] ))- identifier[Decimal] (
identifier[sum] ( identifier[taxes_and_fees] )
)
keyword[else] :
keyword[return] ( identifier[float] ( identifier[gross_pay] )+ identifier[float] ( identifier[employer_match] ))- identifier[sum] ( identifier[taxes_and_fees] )
|
def take_home_pay(gross_pay, employer_match, taxes_and_fees, numtype='float'):
"""
Calculate net take-home pay including employer retirement savings match
using the formula laid out by Mr. Money Mustache:
http://www.mrmoneymustache.com/2015/01/26/calculating-net-worth/
Args:
gross_pay: float or int, gross monthly pay.
employer_match: float or int, the 401(k) match from your employer.
taxes_and_fees: list, taxes and fees that are deducted from your paycheck.
numtype: string, 'decimal' or 'float'; the type of number to return.
Returns:
your monthly take-home pay.
"""
if numtype == 'decimal':
return Decimal(gross_pay) + Decimal(employer_match) - Decimal(sum(taxes_and_fees)) # depends on [control=['if'], data=[]]
else:
return float(gross_pay) + float(employer_match) - sum(taxes_and_fees)
|
def parse_chars(bels: list, errors: Errors) -> Tuple[CharLocs, Errors]:
"""Scan BEL string to map parens, quotes, commas
Args:
bels: bel string as an array of characters
errors: list of error tuples ('<type>', '<msg>')
Returns:
(char_locs, errors): character locations and errors
"""
pstack, qstack, nested_pstack = [], [], []
parens, nested_parens, quotes, commas = {}, {}, {}, {}
notquoted_flag = True
for i, c in enumerate(bels):
prior_char = i - 1
# print('BEL', prior_char, b[prior_char])
# Find starting quote
if c == '"' and bels[prior_char] != "\\" and len(qstack) == 0:
qstack.append(i)
notquoted_flag = False
# Find closing quote
elif c == '"' and bels[prior_char] != "\\":
quotes[qstack.pop()] = i
notquoted_flag = True
# Find all escaped quotes outside of quoted string
elif c == '"' and bels[prior_char] == "\\" and len(qstack) == 0:
errors.append(
(
"ERROR",
f"Escaped quote outside of quoted string at location: {i - 1}",
(i - 1, i - 1),
)
)
# Find all nested object opening parens
elif notquoted_flag and c == "(" and bels[prior_char] == " ":
if len(nested_pstack) > 1:
errors.append(
(
"ERROR",
f"More than one nested parenthesis or left parenthesis following a space character",
(i, i),
)
)
nested_pstack.append(i)
# Find all opening parens
elif notquoted_flag and c == "(" and bels[prior_char] not in ["\\"]:
pstack.append(i)
# Find all closing parens
elif notquoted_flag and c == ")" and bels[prior_char] != "\\":
if len(pstack):
if len(pstack) > 1:
parens[pstack.pop()] = (i, "child")
else:
parens[pstack.pop()] = (i, "top")
elif len(nested_pstack):
nested_parens[nested_pstack.pop()] = (i, "top")
else:
errors.append(
(
"ERROR",
f"Missing left parenthesis for right parenthesis at location {i}",
(i, i),
)
)
# Find comma outside of quoted string
elif notquoted_flag and c == "," and len(qstack) == 0:
sparen = pstack[-1]
if sparen not in commas:
commas[sparen] = [i]
else:
commas[sparen].append(i)
while len(pstack):
errors.append(
(
"ERROR",
f"Missing right parenthesis for left parenthesis at location {pstack[-1]}",
(pstack[-1], pstack[-1]),
)
)
if len(pstack) > 1:
parens[pstack.pop()] = (-1, "child")
else:
parens[pstack.pop()] = (-1, "top")
while len(nested_pstack):
errors.append(
(
"ERROR",
f"Missing right parenthesis for nested object left parenthesis at location {nested_pstack[-1]}",
(nested_pstack[-1], nested_pstack[-1]),
)
)
nested_parens[nested_pstack.pop()] = (-1, "top")
if len(qstack):
missing_quote = qstack.pop()
errors.append(
(
"ERROR",
f"Missing right quote for left quote at location {missing_quote}",
(missing_quote, missing_quote),
)
)
return (
{
"parens": parens,
"nested_parens": nested_parens,
"quotes": quotes,
"commas": commas,
},
errors,
)
|
def function[parse_chars, parameter[bels, errors]]:
constant[Scan BEL string to map parens, quotes, commas
Args:
bels: bel string as an array of characters
errors: list of error tuples ('<type>', '<msg>')
Returns:
(char_locs, errors): character locations and errors
]
<ast.Tuple object at 0x7da1b1968c70> assign[=] tuple[[<ast.List object at 0x7da1b196afe0>, <ast.List object at 0x7da1b19694e0>, <ast.List object at 0x7da1b196a8c0>]]
<ast.Tuple object at 0x7da1b196b640> assign[=] tuple[[<ast.Dict object at 0x7da1b196b010>, <ast.Dict object at 0x7da1b196be20>, <ast.Dict object at 0x7da1b196b6a0>, <ast.Dict object at 0x7da1b1968850>]]
variable[notquoted_flag] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da1b196a2f0>, <ast.Name object at 0x7da1b1968d90>]]] in starred[call[name[enumerate], parameter[name[bels]]]] begin[:]
variable[prior_char] assign[=] binary_operation[name[i] - constant[1]]
if <ast.BoolOp object at 0x7da1b196ad70> begin[:]
call[name[qstack].append, parameter[name[i]]]
variable[notquoted_flag] assign[=] constant[False]
while call[name[len], parameter[name[pstack]]] begin[:]
call[name[errors].append, parameter[tuple[[<ast.Constant object at 0x7da1b18772b0>, <ast.JoinedStr object at 0x7da1b1877cd0>, <ast.Tuple object at 0x7da1b18755a0>]]]]
if compare[call[name[len], parameter[name[pstack]]] greater[>] constant[1]] begin[:]
call[name[parens]][call[name[pstack].pop, parameter[]]] assign[=] tuple[[<ast.UnaryOp object at 0x7da1b1876470>, <ast.Constant object at 0x7da1b1874d60>]]
while call[name[len], parameter[name[nested_pstack]]] begin[:]
call[name[errors].append, parameter[tuple[[<ast.Constant object at 0x7da1b1874730>, <ast.JoinedStr object at 0x7da1b18745e0>, <ast.Tuple object at 0x7da1b1875a80>]]]]
call[name[nested_parens]][call[name[nested_pstack].pop, parameter[]]] assign[=] tuple[[<ast.UnaryOp object at 0x7da1b1875480>, <ast.Constant object at 0x7da1b1874d30>]]
if call[name[len], parameter[name[qstack]]] begin[:]
variable[missing_quote] assign[=] call[name[qstack].pop, parameter[]]
call[name[errors].append, parameter[tuple[[<ast.Constant object at 0x7da1b1876260>, <ast.JoinedStr object at 0x7da1b1876b90>, <ast.Tuple object at 0x7da1b1877eb0>]]]]
return[tuple[[<ast.Dict object at 0x7da1b1875630>, <ast.Name object at 0x7da1b1877e50>]]]
|
keyword[def] identifier[parse_chars] ( identifier[bels] : identifier[list] , identifier[errors] : identifier[Errors] )-> identifier[Tuple] [ identifier[CharLocs] , identifier[Errors] ]:
literal[string]
identifier[pstack] , identifier[qstack] , identifier[nested_pstack] =[],[],[]
identifier[parens] , identifier[nested_parens] , identifier[quotes] , identifier[commas] ={},{},{},{}
identifier[notquoted_flag] = keyword[True]
keyword[for] identifier[i] , identifier[c] keyword[in] identifier[enumerate] ( identifier[bels] ):
identifier[prior_char] = identifier[i] - literal[int]
keyword[if] identifier[c] == literal[string] keyword[and] identifier[bels] [ identifier[prior_char] ]!= literal[string] keyword[and] identifier[len] ( identifier[qstack] )== literal[int] :
identifier[qstack] . identifier[append] ( identifier[i] )
identifier[notquoted_flag] = keyword[False]
keyword[elif] identifier[c] == literal[string] keyword[and] identifier[bels] [ identifier[prior_char] ]!= literal[string] :
identifier[quotes] [ identifier[qstack] . identifier[pop] ()]= identifier[i]
identifier[notquoted_flag] = keyword[True]
keyword[elif] identifier[c] == literal[string] keyword[and] identifier[bels] [ identifier[prior_char] ]== literal[string] keyword[and] identifier[len] ( identifier[qstack] )== literal[int] :
identifier[errors] . identifier[append] (
(
literal[string] ,
literal[string] ,
( identifier[i] - literal[int] , identifier[i] - literal[int] ),
)
)
keyword[elif] identifier[notquoted_flag] keyword[and] identifier[c] == literal[string] keyword[and] identifier[bels] [ identifier[prior_char] ]== literal[string] :
keyword[if] identifier[len] ( identifier[nested_pstack] )> literal[int] :
identifier[errors] . identifier[append] (
(
literal[string] ,
literal[string] ,
( identifier[i] , identifier[i] ),
)
)
identifier[nested_pstack] . identifier[append] ( identifier[i] )
keyword[elif] identifier[notquoted_flag] keyword[and] identifier[c] == literal[string] keyword[and] identifier[bels] [ identifier[prior_char] ] keyword[not] keyword[in] [ literal[string] ]:
identifier[pstack] . identifier[append] ( identifier[i] )
keyword[elif] identifier[notquoted_flag] keyword[and] identifier[c] == literal[string] keyword[and] identifier[bels] [ identifier[prior_char] ]!= literal[string] :
keyword[if] identifier[len] ( identifier[pstack] ):
keyword[if] identifier[len] ( identifier[pstack] )> literal[int] :
identifier[parens] [ identifier[pstack] . identifier[pop] ()]=( identifier[i] , literal[string] )
keyword[else] :
identifier[parens] [ identifier[pstack] . identifier[pop] ()]=( identifier[i] , literal[string] )
keyword[elif] identifier[len] ( identifier[nested_pstack] ):
identifier[nested_parens] [ identifier[nested_pstack] . identifier[pop] ()]=( identifier[i] , literal[string] )
keyword[else] :
identifier[errors] . identifier[append] (
(
literal[string] ,
literal[string] ,
( identifier[i] , identifier[i] ),
)
)
keyword[elif] identifier[notquoted_flag] keyword[and] identifier[c] == literal[string] keyword[and] identifier[len] ( identifier[qstack] )== literal[int] :
identifier[sparen] = identifier[pstack] [- literal[int] ]
keyword[if] identifier[sparen] keyword[not] keyword[in] identifier[commas] :
identifier[commas] [ identifier[sparen] ]=[ identifier[i] ]
keyword[else] :
identifier[commas] [ identifier[sparen] ]. identifier[append] ( identifier[i] )
keyword[while] identifier[len] ( identifier[pstack] ):
identifier[errors] . identifier[append] (
(
literal[string] ,
literal[string] ,
( identifier[pstack] [- literal[int] ], identifier[pstack] [- literal[int] ]),
)
)
keyword[if] identifier[len] ( identifier[pstack] )> literal[int] :
identifier[parens] [ identifier[pstack] . identifier[pop] ()]=(- literal[int] , literal[string] )
keyword[else] :
identifier[parens] [ identifier[pstack] . identifier[pop] ()]=(- literal[int] , literal[string] )
keyword[while] identifier[len] ( identifier[nested_pstack] ):
identifier[errors] . identifier[append] (
(
literal[string] ,
literal[string] ,
( identifier[nested_pstack] [- literal[int] ], identifier[nested_pstack] [- literal[int] ]),
)
)
identifier[nested_parens] [ identifier[nested_pstack] . identifier[pop] ()]=(- literal[int] , literal[string] )
keyword[if] identifier[len] ( identifier[qstack] ):
identifier[missing_quote] = identifier[qstack] . identifier[pop] ()
identifier[errors] . identifier[append] (
(
literal[string] ,
literal[string] ,
( identifier[missing_quote] , identifier[missing_quote] ),
)
)
keyword[return] (
{
literal[string] : identifier[parens] ,
literal[string] : identifier[nested_parens] ,
literal[string] : identifier[quotes] ,
literal[string] : identifier[commas] ,
},
identifier[errors] ,
)
|
def parse_chars(bels: list, errors: Errors) -> Tuple[CharLocs, Errors]:
"""Scan BEL string to map parens, quotes, commas
Args:
bels: bel string as an array of characters
errors: list of error tuples ('<type>', '<msg>')
Returns:
(char_locs, errors): character locations and errors
"""
(pstack, qstack, nested_pstack) = ([], [], [])
(parens, nested_parens, quotes, commas) = ({}, {}, {}, {})
notquoted_flag = True
for (i, c) in enumerate(bels):
prior_char = i - 1
# print('BEL', prior_char, b[prior_char])
# Find starting quote
if c == '"' and bels[prior_char] != '\\' and (len(qstack) == 0):
qstack.append(i)
notquoted_flag = False # depends on [control=['if'], data=[]]
# Find closing quote
elif c == '"' and bels[prior_char] != '\\':
quotes[qstack.pop()] = i
notquoted_flag = True # depends on [control=['if'], data=[]]
# Find all escaped quotes outside of quoted string
elif c == '"' and bels[prior_char] == '\\' and (len(qstack) == 0):
errors.append(('ERROR', f'Escaped quote outside of quoted string at location: {i - 1}', (i - 1, i - 1))) # depends on [control=['if'], data=[]]
# Find all nested object opening parens
elif notquoted_flag and c == '(' and (bels[prior_char] == ' '):
if len(nested_pstack) > 1:
errors.append(('ERROR', f'More than one nested parenthesis or left parenthesis following a space character', (i, i))) # depends on [control=['if'], data=[]]
nested_pstack.append(i) # depends on [control=['if'], data=[]]
# Find all opening parens
elif notquoted_flag and c == '(' and (bels[prior_char] not in ['\\']):
pstack.append(i) # depends on [control=['if'], data=[]]
# Find all closing parens
elif notquoted_flag and c == ')' and (bels[prior_char] != '\\'):
if len(pstack):
if len(pstack) > 1:
parens[pstack.pop()] = (i, 'child') # depends on [control=['if'], data=[]]
else:
parens[pstack.pop()] = (i, 'top') # depends on [control=['if'], data=[]]
elif len(nested_pstack):
nested_parens[nested_pstack.pop()] = (i, 'top') # depends on [control=['if'], data=[]]
else:
errors.append(('ERROR', f'Missing left parenthesis for right parenthesis at location {i}', (i, i))) # depends on [control=['if'], data=[]]
# Find comma outside of quoted string
elif notquoted_flag and c == ',' and (len(qstack) == 0):
sparen = pstack[-1]
if sparen not in commas:
commas[sparen] = [i] # depends on [control=['if'], data=['sparen', 'commas']]
else:
commas[sparen].append(i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
while len(pstack):
errors.append(('ERROR', f'Missing right parenthesis for left parenthesis at location {pstack[-1]}', (pstack[-1], pstack[-1])))
if len(pstack) > 1:
parens[pstack.pop()] = (-1, 'child') # depends on [control=['if'], data=[]]
else:
parens[pstack.pop()] = (-1, 'top') # depends on [control=['while'], data=[]]
while len(nested_pstack):
errors.append(('ERROR', f'Missing right parenthesis for nested object left parenthesis at location {nested_pstack[-1]}', (nested_pstack[-1], nested_pstack[-1])))
nested_parens[nested_pstack.pop()] = (-1, 'top') # depends on [control=['while'], data=[]]
if len(qstack):
missing_quote = qstack.pop()
errors.append(('ERROR', f'Missing right quote for left quote at location {missing_quote}', (missing_quote, missing_quote))) # depends on [control=['if'], data=[]]
return ({'parens': parens, 'nested_parens': nested_parens, 'quotes': quotes, 'commas': commas}, errors)
|
def get_wrapped_instance(self, instance=None):
"""Returns a wrapped model instance.
"""
if instance._meta.label_lower not in self.registry:
raise ModelNotRegistered(f"{repr(instance)} is not registered with {self}.")
wrapper_cls = self.registry.get(instance._meta.label_lower) or self.wrapper_cls
if wrapper_cls:
return wrapper_cls(instance)
return instance
|
def function[get_wrapped_instance, parameter[self, instance]]:
constant[Returns a wrapped model instance.
]
if compare[name[instance]._meta.label_lower <ast.NotIn object at 0x7da2590d7190> name[self].registry] begin[:]
<ast.Raise object at 0x7da2054a41c0>
variable[wrapper_cls] assign[=] <ast.BoolOp object at 0x7da1b18bada0>
if name[wrapper_cls] begin[:]
return[call[name[wrapper_cls], parameter[name[instance]]]]
return[name[instance]]
|
keyword[def] identifier[get_wrapped_instance] ( identifier[self] , identifier[instance] = keyword[None] ):
literal[string]
keyword[if] identifier[instance] . identifier[_meta] . identifier[label_lower] keyword[not] keyword[in] identifier[self] . identifier[registry] :
keyword[raise] identifier[ModelNotRegistered] ( literal[string] )
identifier[wrapper_cls] = identifier[self] . identifier[registry] . identifier[get] ( identifier[instance] . identifier[_meta] . identifier[label_lower] ) keyword[or] identifier[self] . identifier[wrapper_cls]
keyword[if] identifier[wrapper_cls] :
keyword[return] identifier[wrapper_cls] ( identifier[instance] )
keyword[return] identifier[instance]
|
def get_wrapped_instance(self, instance=None):
"""Returns a wrapped model instance.
"""
if instance._meta.label_lower not in self.registry:
raise ModelNotRegistered(f'{repr(instance)} is not registered with {self}.') # depends on [control=['if'], data=[]]
wrapper_cls = self.registry.get(instance._meta.label_lower) or self.wrapper_cls
if wrapper_cls:
return wrapper_cls(instance) # depends on [control=['if'], data=[]]
return instance
|
def clear(self):
"""
Clear screen and go to 0,0
"""
# Erase current output first.
self.erase()
# Send "Erase Screen" command and go to (0, 0).
output = self.output
output.erase_screen()
output.cursor_goto(0, 0)
output.flush()
self.request_absolute_cursor_position()
|
def function[clear, parameter[self]]:
constant[
Clear screen and go to 0,0
]
call[name[self].erase, parameter[]]
variable[output] assign[=] name[self].output
call[name[output].erase_screen, parameter[]]
call[name[output].cursor_goto, parameter[constant[0], constant[0]]]
call[name[output].flush, parameter[]]
call[name[self].request_absolute_cursor_position, parameter[]]
|
keyword[def] identifier[clear] ( identifier[self] ):
literal[string]
identifier[self] . identifier[erase] ()
identifier[output] = identifier[self] . identifier[output]
identifier[output] . identifier[erase_screen] ()
identifier[output] . identifier[cursor_goto] ( literal[int] , literal[int] )
identifier[output] . identifier[flush] ()
identifier[self] . identifier[request_absolute_cursor_position] ()
|
def clear(self):
"""
Clear screen and go to 0,0
"""
# Erase current output first.
self.erase()
# Send "Erase Screen" command and go to (0, 0).
output = self.output
output.erase_screen()
output.cursor_goto(0, 0)
output.flush()
self.request_absolute_cursor_position()
|
def makeQ(r1, r2, r3, r4=0):
"""
matrix involved in quaternion rotation
"""
Q = np.asarray([
[r4, -r3, r2, r1],
[r3, r4, -r1, r2],
[-r2, r1, r4, r3],
[-r1, -r2, -r3, r4]])
return Q
|
def function[makeQ, parameter[r1, r2, r3, r4]]:
constant[
matrix involved in quaternion rotation
]
variable[Q] assign[=] call[name[np].asarray, parameter[list[[<ast.List object at 0x7da1b0721f00>, <ast.List object at 0x7da1b0720be0>, <ast.List object at 0x7da1b0720970>, <ast.List object at 0x7da1b0723160>]]]]
return[name[Q]]
|
keyword[def] identifier[makeQ] ( identifier[r1] , identifier[r2] , identifier[r3] , identifier[r4] = literal[int] ):
literal[string]
identifier[Q] = identifier[np] . identifier[asarray] ([
[ identifier[r4] ,- identifier[r3] , identifier[r2] , identifier[r1] ],
[ identifier[r3] , identifier[r4] ,- identifier[r1] , identifier[r2] ],
[- identifier[r2] , identifier[r1] , identifier[r4] , identifier[r3] ],
[- identifier[r1] ,- identifier[r2] ,- identifier[r3] , identifier[r4] ]])
keyword[return] identifier[Q]
|
def makeQ(r1, r2, r3, r4=0):
"""
matrix involved in quaternion rotation
"""
Q = np.asarray([[r4, -r3, r2, r1], [r3, r4, -r1, r2], [-r2, r1, r4, r3], [-r1, -r2, -r3, r4]])
return Q
|
def manage(group_id):
"""Manage your group."""
group = Group.query.get_or_404(group_id)
form = GroupForm(request.form, obj=group)
if form.validate_on_submit():
if group.can_edit(current_user):
try:
group.update(**form.data)
flash(_('Group "%(name)s" was updated', name=group.name),
'success')
except Exception as e:
flash(str(e), 'error')
return render_template(
"invenio_groups/new.html",
form=form,
group=group,
)
else:
flash(
_(
'You cannot edit group %(group_name)s',
group_name=group.name
),
'error'
)
return render_template(
"invenio_groups/new.html",
form=form,
group=group,
)
|
def function[manage, parameter[group_id]]:
constant[Manage your group.]
variable[group] assign[=] call[name[Group].query.get_or_404, parameter[name[group_id]]]
variable[form] assign[=] call[name[GroupForm], parameter[name[request].form]]
if call[name[form].validate_on_submit, parameter[]] begin[:]
if call[name[group].can_edit, parameter[name[current_user]]] begin[:]
<ast.Try object at 0x7da207f998d0>
return[call[name[render_template], parameter[constant[invenio_groups/new.html]]]]
|
keyword[def] identifier[manage] ( identifier[group_id] ):
literal[string]
identifier[group] = identifier[Group] . identifier[query] . identifier[get_or_404] ( identifier[group_id] )
identifier[form] = identifier[GroupForm] ( identifier[request] . identifier[form] , identifier[obj] = identifier[group] )
keyword[if] identifier[form] . identifier[validate_on_submit] ():
keyword[if] identifier[group] . identifier[can_edit] ( identifier[current_user] ):
keyword[try] :
identifier[group] . identifier[update] (** identifier[form] . identifier[data] )
identifier[flash] ( identifier[_] ( literal[string] , identifier[name] = identifier[group] . identifier[name] ),
literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[flash] ( identifier[str] ( identifier[e] ), literal[string] )
keyword[return] identifier[render_template] (
literal[string] ,
identifier[form] = identifier[form] ,
identifier[group] = identifier[group] ,
)
keyword[else] :
identifier[flash] (
identifier[_] (
literal[string] ,
identifier[group_name] = identifier[group] . identifier[name]
),
literal[string]
)
keyword[return] identifier[render_template] (
literal[string] ,
identifier[form] = identifier[form] ,
identifier[group] = identifier[group] ,
)
|
def manage(group_id):
"""Manage your group."""
group = Group.query.get_or_404(group_id)
form = GroupForm(request.form, obj=group)
if form.validate_on_submit():
if group.can_edit(current_user):
try:
group.update(**form.data)
flash(_('Group "%(name)s" was updated', name=group.name), 'success') # depends on [control=['try'], data=[]]
except Exception as e:
flash(str(e), 'error')
return render_template('invenio_groups/new.html', form=form, group=group) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
else:
flash(_('You cannot edit group %(group_name)s', group_name=group.name), 'error') # depends on [control=['if'], data=[]]
return render_template('invenio_groups/new.html', form=form, group=group)
|
def ret_glob_minions(self):
'''
Return minions that match via glob
'''
fnfilter = functools.partial(fnmatch.filter, pat=self.tgt)
return self._ret_minions(fnfilter)
|
def function[ret_glob_minions, parameter[self]]:
constant[
Return minions that match via glob
]
variable[fnfilter] assign[=] call[name[functools].partial, parameter[name[fnmatch].filter]]
return[call[name[self]._ret_minions, parameter[name[fnfilter]]]]
|
keyword[def] identifier[ret_glob_minions] ( identifier[self] ):
literal[string]
identifier[fnfilter] = identifier[functools] . identifier[partial] ( identifier[fnmatch] . identifier[filter] , identifier[pat] = identifier[self] . identifier[tgt] )
keyword[return] identifier[self] . identifier[_ret_minions] ( identifier[fnfilter] )
|
def ret_glob_minions(self):
"""
Return minions that match via glob
"""
fnfilter = functools.partial(fnmatch.filter, pat=self.tgt)
return self._ret_minions(fnfilter)
|
def get(self, request, *args, **kwargs):
"""Override default get function to use token if there is one to retrieve object. If a
subclass should use their own GET implementation, token_from_kwargs should be called if
that detail view should be accessible via token."""
self.object = self.get_object()
allow_anonymous = kwargs.get("allow_anonymous", False)
# We only want to redirect is that setting is true, and we're not allowing anonymous users
if self.redirect_correct_path and not allow_anonymous:
# Also we obviously only want to redirect if the URL is wrong
if self.request.path != self.object.get_absolute_url():
return HttpResponsePermanentRedirect(self.object.get_absolute_url())
context = self.get_context_data(object=self.object)
response = self.render_to_response(context)
# If we have an unpublished article....
if self.object.published is None or self.object.published > timezone.now():
# And the user doesn't have permission to view this
if not request.user.is_staff and not allow_anonymous:
response = redirect_unpublished_to_login_or_404(
request=request,
next_url=self.object.get_absolute_url(),
next_params=request.GET)
# Never cache unpublished articles
add_never_cache_headers(response)
else:
response["Vary"] = "Accept-Encoding"
return response
|
def function[get, parameter[self, request]]:
constant[Override default get function to use token if there is one to retrieve object. If a
subclass should use their own GET implementation, token_from_kwargs should be called if
that detail view should be accessible via token.]
name[self].object assign[=] call[name[self].get_object, parameter[]]
variable[allow_anonymous] assign[=] call[name[kwargs].get, parameter[constant[allow_anonymous], constant[False]]]
if <ast.BoolOp object at 0x7da1b0b3ad70> begin[:]
if compare[name[self].request.path not_equal[!=] call[name[self].object.get_absolute_url, parameter[]]] begin[:]
return[call[name[HttpResponsePermanentRedirect], parameter[call[name[self].object.get_absolute_url, parameter[]]]]]
variable[context] assign[=] call[name[self].get_context_data, parameter[]]
variable[response] assign[=] call[name[self].render_to_response, parameter[name[context]]]
if <ast.BoolOp object at 0x7da1b0aca530> begin[:]
if <ast.BoolOp object at 0x7da1b0acb4f0> begin[:]
variable[response] assign[=] call[name[redirect_unpublished_to_login_or_404], parameter[]]
call[name[add_never_cache_headers], parameter[name[response]]]
return[name[response]]
|
keyword[def] identifier[get] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[object] = identifier[self] . identifier[get_object] ()
identifier[allow_anonymous] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
keyword[if] identifier[self] . identifier[redirect_correct_path] keyword[and] keyword[not] identifier[allow_anonymous] :
keyword[if] identifier[self] . identifier[request] . identifier[path] != identifier[self] . identifier[object] . identifier[get_absolute_url] ():
keyword[return] identifier[HttpResponsePermanentRedirect] ( identifier[self] . identifier[object] . identifier[get_absolute_url] ())
identifier[context] = identifier[self] . identifier[get_context_data] ( identifier[object] = identifier[self] . identifier[object] )
identifier[response] = identifier[self] . identifier[render_to_response] ( identifier[context] )
keyword[if] identifier[self] . identifier[object] . identifier[published] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[object] . identifier[published] > identifier[timezone] . identifier[now] ():
keyword[if] keyword[not] identifier[request] . identifier[user] . identifier[is_staff] keyword[and] keyword[not] identifier[allow_anonymous] :
identifier[response] = identifier[redirect_unpublished_to_login_or_404] (
identifier[request] = identifier[request] ,
identifier[next_url] = identifier[self] . identifier[object] . identifier[get_absolute_url] (),
identifier[next_params] = identifier[request] . identifier[GET] )
identifier[add_never_cache_headers] ( identifier[response] )
keyword[else] :
identifier[response] [ literal[string] ]= literal[string]
keyword[return] identifier[response]
|
def get(self, request, *args, **kwargs):
"""Override default get function to use token if there is one to retrieve object. If a
subclass should use their own GET implementation, token_from_kwargs should be called if
that detail view should be accessible via token."""
self.object = self.get_object()
allow_anonymous = kwargs.get('allow_anonymous', False)
# We only want to redirect is that setting is true, and we're not allowing anonymous users
if self.redirect_correct_path and (not allow_anonymous):
# Also we obviously only want to redirect if the URL is wrong
if self.request.path != self.object.get_absolute_url():
return HttpResponsePermanentRedirect(self.object.get_absolute_url()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
context = self.get_context_data(object=self.object)
response = self.render_to_response(context)
# If we have an unpublished article....
if self.object.published is None or self.object.published > timezone.now():
# And the user doesn't have permission to view this
if not request.user.is_staff and (not allow_anonymous):
response = redirect_unpublished_to_login_or_404(request=request, next_url=self.object.get_absolute_url(), next_params=request.GET) # depends on [control=['if'], data=[]]
# Never cache unpublished articles
add_never_cache_headers(response) # depends on [control=['if'], data=[]]
else:
response['Vary'] = 'Accept-Encoding'
return response
|
def encode_metadata_request(cls, topics=(), payloads=None):
"""
Encode a MetadataRequest
Arguments:
topics: list of strings
"""
if payloads is not None:
topics = payloads
return kafka.protocol.metadata.MetadataRequest[0](topics)
|
def function[encode_metadata_request, parameter[cls, topics, payloads]]:
constant[
Encode a MetadataRequest
Arguments:
topics: list of strings
]
if compare[name[payloads] is_not constant[None]] begin[:]
variable[topics] assign[=] name[payloads]
return[call[call[name[kafka].protocol.metadata.MetadataRequest][constant[0]], parameter[name[topics]]]]
|
keyword[def] identifier[encode_metadata_request] ( identifier[cls] , identifier[topics] =(), identifier[payloads] = keyword[None] ):
literal[string]
keyword[if] identifier[payloads] keyword[is] keyword[not] keyword[None] :
identifier[topics] = identifier[payloads]
keyword[return] identifier[kafka] . identifier[protocol] . identifier[metadata] . identifier[MetadataRequest] [ literal[int] ]( identifier[topics] )
|
def encode_metadata_request(cls, topics=(), payloads=None):
"""
Encode a MetadataRequest
Arguments:
topics: list of strings
"""
if payloads is not None:
topics = payloads # depends on [control=['if'], data=['payloads']]
return kafka.protocol.metadata.MetadataRequest[0](topics)
|
def froze_it(cls):
"""
Decorator to prevent from creating attributes in the object ouside __init__().
This decorator must be applied to the final class (doesn't work if a
decorated class is inherited).
Yoann's answer at http://stackoverflow.com/questions/3603502
"""
cls._frozen = False
def frozensetattr(self, key, value):
if self._frozen and not hasattr(self, key):
raise AttributeError("Attribute '{}' of class '{}' does not exist!"
.format(key, cls.__name__))
else:
object.__setattr__(self, key, value)
def init_decorator(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
func(self, *args, **kwargs)
self._frozen = True
return wrapper
cls.__setattr__ = frozensetattr
cls.__init__ = init_decorator(cls.__init__)
return cls
|
def function[froze_it, parameter[cls]]:
constant[
Decorator to prevent from creating attributes in the object ouside __init__().
This decorator must be applied to the final class (doesn't work if a
decorated class is inherited).
Yoann's answer at http://stackoverflow.com/questions/3603502
]
name[cls]._frozen assign[=] constant[False]
def function[frozensetattr, parameter[self, key, value]]:
if <ast.BoolOp object at 0x7da1b287ba60> begin[:]
<ast.Raise object at 0x7da1b287bd00>
def function[init_decorator, parameter[func]]:
def function[wrapper, parameter[self]]:
call[name[func], parameter[name[self], <ast.Starred object at 0x7da1b287aef0>]]
name[self]._frozen assign[=] constant[True]
return[name[wrapper]]
name[cls].__setattr__ assign[=] name[frozensetattr]
name[cls].__init__ assign[=] call[name[init_decorator], parameter[name[cls].__init__]]
return[name[cls]]
|
keyword[def] identifier[froze_it] ( identifier[cls] ):
literal[string]
identifier[cls] . identifier[_frozen] = keyword[False]
keyword[def] identifier[frozensetattr] ( identifier[self] , identifier[key] , identifier[value] ):
keyword[if] identifier[self] . identifier[_frozen] keyword[and] keyword[not] identifier[hasattr] ( identifier[self] , identifier[key] ):
keyword[raise] identifier[AttributeError] ( literal[string]
. identifier[format] ( identifier[key] , identifier[cls] . identifier[__name__] ))
keyword[else] :
identifier[object] . identifier[__setattr__] ( identifier[self] , identifier[key] , identifier[value] )
keyword[def] identifier[init_decorator] ( identifier[func] ):
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[_frozen] = keyword[True]
keyword[return] identifier[wrapper]
identifier[cls] . identifier[__setattr__] = identifier[frozensetattr]
identifier[cls] . identifier[__init__] = identifier[init_decorator] ( identifier[cls] . identifier[__init__] )
keyword[return] identifier[cls]
|
def froze_it(cls):
"""
Decorator to prevent from creating attributes in the object ouside __init__().
This decorator must be applied to the final class (doesn't work if a
decorated class is inherited).
Yoann's answer at http://stackoverflow.com/questions/3603502
"""
cls._frozen = False
def frozensetattr(self, key, value):
if self._frozen and (not hasattr(self, key)):
raise AttributeError("Attribute '{}' of class '{}' does not exist!".format(key, cls.__name__)) # depends on [control=['if'], data=[]]
else:
object.__setattr__(self, key, value)
def init_decorator(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
func(self, *args, **kwargs)
self._frozen = True
return wrapper
cls.__setattr__ = frozensetattr
cls.__init__ = init_decorator(cls.__init__)
return cls
|
def API520_W(Pset, Pback):
r'''Calculates capacity correction due to backpressure on balanced
spring-loaded PRVs in liquid service. For pilot operated valves,
this is always 1. Applicable up to 50% of the percent gauge backpressure,
For use in API 520 relief valve sizing. 1D interpolation among a table with
53 backpressures is performed.
Parameters
----------
Pset : float
Set pressure for relief [Pa]
Pback : float
Backpressure, [Pa]
Returns
-------
KW : float
Correction due to liquid backpressure [-]
Notes
-----
If the calculated gauge backpressure is less than 15%, a value of 1 is
returned.
Examples
--------
Custom example from figure 31:
>>> API520_W(1E6, 3E5) # 22% overpressure
0.9511471848008564
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
'''
gauge_backpressure = (Pback-atm)/(Pset-atm)*100.0 # in percent
if gauge_backpressure < 15.0:
return 1.0
return interp(gauge_backpressure, Kw_x, Kw_y)
|
def function[API520_W, parameter[Pset, Pback]]:
constant[Calculates capacity correction due to backpressure on balanced
spring-loaded PRVs in liquid service. For pilot operated valves,
this is always 1. Applicable up to 50% of the percent gauge backpressure,
For use in API 520 relief valve sizing. 1D interpolation among a table with
53 backpressures is performed.
Parameters
----------
Pset : float
Set pressure for relief [Pa]
Pback : float
Backpressure, [Pa]
Returns
-------
KW : float
Correction due to liquid backpressure [-]
Notes
-----
If the calculated gauge backpressure is less than 15%, a value of 1 is
returned.
Examples
--------
Custom example from figure 31:
>>> API520_W(1E6, 3E5) # 22% overpressure
0.9511471848008564
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
]
variable[gauge_backpressure] assign[=] binary_operation[binary_operation[binary_operation[name[Pback] - name[atm]] / binary_operation[name[Pset] - name[atm]]] * constant[100.0]]
if compare[name[gauge_backpressure] less[<] constant[15.0]] begin[:]
return[constant[1.0]]
return[call[name[interp], parameter[name[gauge_backpressure], name[Kw_x], name[Kw_y]]]]
|
keyword[def] identifier[API520_W] ( identifier[Pset] , identifier[Pback] ):
literal[string]
identifier[gauge_backpressure] =( identifier[Pback] - identifier[atm] )/( identifier[Pset] - identifier[atm] )* literal[int]
keyword[if] identifier[gauge_backpressure] < literal[int] :
keyword[return] literal[int]
keyword[return] identifier[interp] ( identifier[gauge_backpressure] , identifier[Kw_x] , identifier[Kw_y] )
|
def API520_W(Pset, Pback):
"""Calculates capacity correction due to backpressure on balanced
spring-loaded PRVs in liquid service. For pilot operated valves,
this is always 1. Applicable up to 50% of the percent gauge backpressure,
For use in API 520 relief valve sizing. 1D interpolation among a table with
53 backpressures is performed.
Parameters
----------
Pset : float
Set pressure for relief [Pa]
Pback : float
Backpressure, [Pa]
Returns
-------
KW : float
Correction due to liquid backpressure [-]
Notes
-----
If the calculated gauge backpressure is less than 15%, a value of 1 is
returned.
Examples
--------
Custom example from figure 31:
>>> API520_W(1E6, 3E5) # 22% overpressure
0.9511471848008564
References
----------
.. [1] API Standard 520, Part 1 - Sizing and Selection.
"""
gauge_backpressure = (Pback - atm) / (Pset - atm) * 100.0 # in percent
if gauge_backpressure < 15.0:
return 1.0 # depends on [control=['if'], data=[]]
return interp(gauge_backpressure, Kw_x, Kw_y)
|
def fitlin(imgarr,refarr):
""" Compute the least-squares fit between two arrays.
A Python translation of 'FITLIN' from 'drutil.f' (Drizzle V2.9).
"""
# Initialize variables
_mat = np.zeros((3,3),dtype=np.float64)
_xorg = imgarr[0][0]
_yorg = imgarr[0][1]
_xoorg = refarr[0][0]
_yoorg = refarr[0][1]
_sigxox = 0.
_sigxoy = 0.
_sigxo = 0.
_sigyox = 0.
_sigyoy = 0.
_sigyo = 0.
_npos = len(imgarr)
# Populate matrices
for i in range(_npos):
_mat[0][0] += np.power((imgarr[i][0] - _xorg),2)
_mat[0][1] += (imgarr[i][0] - _xorg) * (imgarr[i][1] - _yorg)
_mat[0][2] += (imgarr[i][0] - _xorg)
_mat[1][1] += np.power((imgarr[i][1] - _yorg),2)
_mat[1][2] += imgarr[i][1] - _yorg
_sigxox += (refarr[i][0] - _xoorg)*(imgarr[i][0] - _xorg)
_sigxoy += (refarr[i][0] - _xoorg)*(imgarr[i][1] - _yorg)
_sigxo += refarr[i][0] - _xoorg
_sigyox += (refarr[i][1] - _yoorg)*(imgarr[i][0] -_xorg)
_sigyoy += (refarr[i][1] - _yoorg)*(imgarr[i][1] - _yorg)
_sigyo += refarr[i][1] - _yoorg
_mat[2][2] = _npos
_mat[1][0] = _mat[0][1]
_mat[2][0] = _mat[0][2]
_mat[2][1] = _mat[1][2]
# Now invert this matrix
_mat = linalg.inv(_mat)
_a = _sigxox*_mat[0][0]+_sigxoy*_mat[0][1]+_sigxo*_mat[0][2]
_b = -1*(_sigxox*_mat[1][0]+_sigxoy*_mat[1][1]+_sigxo*_mat[1][2])
#_x0 = _sigxox*_mat[2][0]+_sigxoy*_mat[2][1]+_sigxo*_mat[2][2]
_c = _sigyox*_mat[1][0]+_sigyoy*_mat[1][1]+_sigyo*_mat[1][2]
_d = _sigyox*_mat[0][0]+_sigyoy*_mat[0][1]+_sigyo*_mat[0][2]
#_y0 = _sigyox*_mat[2][0]+_sigyoy*_mat[2][1]+_sigyo*_mat[2][2]
_xt = _xoorg - _a*_xorg+_b*_yorg
_yt = _yoorg - _d*_xorg-_c*_yorg
return [_a,_b,_xt],[_c,_d,_yt]
|
def function[fitlin, parameter[imgarr, refarr]]:
constant[ Compute the least-squares fit between two arrays.
A Python translation of 'FITLIN' from 'drutil.f' (Drizzle V2.9).
]
variable[_mat] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Constant object at 0x7da1b1bcbd90>, <ast.Constant object at 0x7da1b1bcb010>]]]]
variable[_xorg] assign[=] call[call[name[imgarr]][constant[0]]][constant[0]]
variable[_yorg] assign[=] call[call[name[imgarr]][constant[0]]][constant[1]]
variable[_xoorg] assign[=] call[call[name[refarr]][constant[0]]][constant[0]]
variable[_yoorg] assign[=] call[call[name[refarr]][constant[0]]][constant[1]]
variable[_sigxox] assign[=] constant[0.0]
variable[_sigxoy] assign[=] constant[0.0]
variable[_sigxo] assign[=] constant[0.0]
variable[_sigyox] assign[=] constant[0.0]
variable[_sigyoy] assign[=] constant[0.0]
variable[_sigyo] assign[=] constant[0.0]
variable[_npos] assign[=] call[name[len], parameter[name[imgarr]]]
for taget[name[i]] in starred[call[name[range], parameter[name[_npos]]]] begin[:]
<ast.AugAssign object at 0x7da1b1c224a0>
<ast.AugAssign object at 0x7da1b1c23eb0>
<ast.AugAssign object at 0x7da1b1c21990>
<ast.AugAssign object at 0x7da1b1c21780>
<ast.AugAssign object at 0x7da1b1c23dc0>
<ast.AugAssign object at 0x7da1b1bb29b0>
<ast.AugAssign object at 0x7da1b1bb2410>
<ast.AugAssign object at 0x7da1b1bb3310>
<ast.AugAssign object at 0x7da1b1bb00d0>
<ast.AugAssign object at 0x7da1b1bb3580>
<ast.AugAssign object at 0x7da1b1bb1660>
call[call[name[_mat]][constant[2]]][constant[2]] assign[=] name[_npos]
call[call[name[_mat]][constant[1]]][constant[0]] assign[=] call[call[name[_mat]][constant[0]]][constant[1]]
call[call[name[_mat]][constant[2]]][constant[0]] assign[=] call[call[name[_mat]][constant[0]]][constant[2]]
call[call[name[_mat]][constant[2]]][constant[1]] assign[=] call[call[name[_mat]][constant[1]]][constant[2]]
variable[_mat] assign[=] call[name[linalg].inv, parameter[name[_mat]]]
variable[_a] assign[=] binary_operation[binary_operation[binary_operation[name[_sigxox] * call[call[name[_mat]][constant[0]]][constant[0]]] + binary_operation[name[_sigxoy] * call[call[name[_mat]][constant[0]]][constant[1]]]] + binary_operation[name[_sigxo] * call[call[name[_mat]][constant[0]]][constant[2]]]]
variable[_b] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b1bbe440> * binary_operation[binary_operation[binary_operation[name[_sigxox] * call[call[name[_mat]][constant[1]]][constant[0]]] + binary_operation[name[_sigxoy] * call[call[name[_mat]][constant[1]]][constant[1]]]] + binary_operation[name[_sigxo] * call[call[name[_mat]][constant[1]]][constant[2]]]]]
variable[_c] assign[=] binary_operation[binary_operation[binary_operation[name[_sigyox] * call[call[name[_mat]][constant[1]]][constant[0]]] + binary_operation[name[_sigyoy] * call[call[name[_mat]][constant[1]]][constant[1]]]] + binary_operation[name[_sigyo] * call[call[name[_mat]][constant[1]]][constant[2]]]]
variable[_d] assign[=] binary_operation[binary_operation[binary_operation[name[_sigyox] * call[call[name[_mat]][constant[0]]][constant[0]]] + binary_operation[name[_sigyoy] * call[call[name[_mat]][constant[0]]][constant[1]]]] + binary_operation[name[_sigyo] * call[call[name[_mat]][constant[0]]][constant[2]]]]
variable[_xt] assign[=] binary_operation[binary_operation[name[_xoorg] - binary_operation[name[_a] * name[_xorg]]] + binary_operation[name[_b] * name[_yorg]]]
variable[_yt] assign[=] binary_operation[binary_operation[name[_yoorg] - binary_operation[name[_d] * name[_xorg]]] - binary_operation[name[_c] * name[_yorg]]]
return[tuple[[<ast.List object at 0x7da1b1bbdd80>, <ast.List object at 0x7da1b1bbeec0>]]]
|
keyword[def] identifier[fitlin] ( identifier[imgarr] , identifier[refarr] ):
literal[string]
identifier[_mat] = identifier[np] . identifier[zeros] (( literal[int] , literal[int] ), identifier[dtype] = identifier[np] . identifier[float64] )
identifier[_xorg] = identifier[imgarr] [ literal[int] ][ literal[int] ]
identifier[_yorg] = identifier[imgarr] [ literal[int] ][ literal[int] ]
identifier[_xoorg] = identifier[refarr] [ literal[int] ][ literal[int] ]
identifier[_yoorg] = identifier[refarr] [ literal[int] ][ literal[int] ]
identifier[_sigxox] = literal[int]
identifier[_sigxoy] = literal[int]
identifier[_sigxo] = literal[int]
identifier[_sigyox] = literal[int]
identifier[_sigyoy] = literal[int]
identifier[_sigyo] = literal[int]
identifier[_npos] = identifier[len] ( identifier[imgarr] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[_npos] ):
identifier[_mat] [ literal[int] ][ literal[int] ]+= identifier[np] . identifier[power] (( identifier[imgarr] [ identifier[i] ][ literal[int] ]- identifier[_xorg] ), literal[int] )
identifier[_mat] [ literal[int] ][ literal[int] ]+=( identifier[imgarr] [ identifier[i] ][ literal[int] ]- identifier[_xorg] )*( identifier[imgarr] [ identifier[i] ][ literal[int] ]- identifier[_yorg] )
identifier[_mat] [ literal[int] ][ literal[int] ]+=( identifier[imgarr] [ identifier[i] ][ literal[int] ]- identifier[_xorg] )
identifier[_mat] [ literal[int] ][ literal[int] ]+= identifier[np] . identifier[power] (( identifier[imgarr] [ identifier[i] ][ literal[int] ]- identifier[_yorg] ), literal[int] )
identifier[_mat] [ literal[int] ][ literal[int] ]+= identifier[imgarr] [ identifier[i] ][ literal[int] ]- identifier[_yorg]
identifier[_sigxox] +=( identifier[refarr] [ identifier[i] ][ literal[int] ]- identifier[_xoorg] )*( identifier[imgarr] [ identifier[i] ][ literal[int] ]- identifier[_xorg] )
identifier[_sigxoy] +=( identifier[refarr] [ identifier[i] ][ literal[int] ]- identifier[_xoorg] )*( identifier[imgarr] [ identifier[i] ][ literal[int] ]- identifier[_yorg] )
identifier[_sigxo] += identifier[refarr] [ identifier[i] ][ literal[int] ]- identifier[_xoorg]
identifier[_sigyox] +=( identifier[refarr] [ identifier[i] ][ literal[int] ]- identifier[_yoorg] )*( identifier[imgarr] [ identifier[i] ][ literal[int] ]- identifier[_xorg] )
identifier[_sigyoy] +=( identifier[refarr] [ identifier[i] ][ literal[int] ]- identifier[_yoorg] )*( identifier[imgarr] [ identifier[i] ][ literal[int] ]- identifier[_yorg] )
identifier[_sigyo] += identifier[refarr] [ identifier[i] ][ literal[int] ]- identifier[_yoorg]
identifier[_mat] [ literal[int] ][ literal[int] ]= identifier[_npos]
identifier[_mat] [ literal[int] ][ literal[int] ]= identifier[_mat] [ literal[int] ][ literal[int] ]
identifier[_mat] [ literal[int] ][ literal[int] ]= identifier[_mat] [ literal[int] ][ literal[int] ]
identifier[_mat] [ literal[int] ][ literal[int] ]= identifier[_mat] [ literal[int] ][ literal[int] ]
identifier[_mat] = identifier[linalg] . identifier[inv] ( identifier[_mat] )
identifier[_a] = identifier[_sigxox] * identifier[_mat] [ literal[int] ][ literal[int] ]+ identifier[_sigxoy] * identifier[_mat] [ literal[int] ][ literal[int] ]+ identifier[_sigxo] * identifier[_mat] [ literal[int] ][ literal[int] ]
identifier[_b] =- literal[int] *( identifier[_sigxox] * identifier[_mat] [ literal[int] ][ literal[int] ]+ identifier[_sigxoy] * identifier[_mat] [ literal[int] ][ literal[int] ]+ identifier[_sigxo] * identifier[_mat] [ literal[int] ][ literal[int] ])
identifier[_c] = identifier[_sigyox] * identifier[_mat] [ literal[int] ][ literal[int] ]+ identifier[_sigyoy] * identifier[_mat] [ literal[int] ][ literal[int] ]+ identifier[_sigyo] * identifier[_mat] [ literal[int] ][ literal[int] ]
identifier[_d] = identifier[_sigyox] * identifier[_mat] [ literal[int] ][ literal[int] ]+ identifier[_sigyoy] * identifier[_mat] [ literal[int] ][ literal[int] ]+ identifier[_sigyo] * identifier[_mat] [ literal[int] ][ literal[int] ]
identifier[_xt] = identifier[_xoorg] - identifier[_a] * identifier[_xorg] + identifier[_b] * identifier[_yorg]
identifier[_yt] = identifier[_yoorg] - identifier[_d] * identifier[_xorg] - identifier[_c] * identifier[_yorg]
keyword[return] [ identifier[_a] , identifier[_b] , identifier[_xt] ],[ identifier[_c] , identifier[_d] , identifier[_yt] ]
|
def fitlin(imgarr, refarr):
""" Compute the least-squares fit between two arrays.
A Python translation of 'FITLIN' from 'drutil.f' (Drizzle V2.9).
"""
# Initialize variables
_mat = np.zeros((3, 3), dtype=np.float64)
_xorg = imgarr[0][0]
_yorg = imgarr[0][1]
_xoorg = refarr[0][0]
_yoorg = refarr[0][1]
_sigxox = 0.0
_sigxoy = 0.0
_sigxo = 0.0
_sigyox = 0.0
_sigyoy = 0.0
_sigyo = 0.0
_npos = len(imgarr)
# Populate matrices
for i in range(_npos):
_mat[0][0] += np.power(imgarr[i][0] - _xorg, 2)
_mat[0][1] += (imgarr[i][0] - _xorg) * (imgarr[i][1] - _yorg)
_mat[0][2] += imgarr[i][0] - _xorg
_mat[1][1] += np.power(imgarr[i][1] - _yorg, 2)
_mat[1][2] += imgarr[i][1] - _yorg
_sigxox += (refarr[i][0] - _xoorg) * (imgarr[i][0] - _xorg)
_sigxoy += (refarr[i][0] - _xoorg) * (imgarr[i][1] - _yorg)
_sigxo += refarr[i][0] - _xoorg
_sigyox += (refarr[i][1] - _yoorg) * (imgarr[i][0] - _xorg)
_sigyoy += (refarr[i][1] - _yoorg) * (imgarr[i][1] - _yorg)
_sigyo += refarr[i][1] - _yoorg # depends on [control=['for'], data=['i']]
_mat[2][2] = _npos
_mat[1][0] = _mat[0][1]
_mat[2][0] = _mat[0][2]
_mat[2][1] = _mat[1][2]
# Now invert this matrix
_mat = linalg.inv(_mat)
_a = _sigxox * _mat[0][0] + _sigxoy * _mat[0][1] + _sigxo * _mat[0][2]
_b = -1 * (_sigxox * _mat[1][0] + _sigxoy * _mat[1][1] + _sigxo * _mat[1][2])
#_x0 = _sigxox*_mat[2][0]+_sigxoy*_mat[2][1]+_sigxo*_mat[2][2]
_c = _sigyox * _mat[1][0] + _sigyoy * _mat[1][1] + _sigyo * _mat[1][2]
_d = _sigyox * _mat[0][0] + _sigyoy * _mat[0][1] + _sigyo * _mat[0][2]
#_y0 = _sigyox*_mat[2][0]+_sigyoy*_mat[2][1]+_sigyo*_mat[2][2]
_xt = _xoorg - _a * _xorg + _b * _yorg
_yt = _yoorg - _d * _xorg - _c * _yorg
return ([_a, _b, _xt], [_c, _d, _yt])
|
def get_default_config_help(self):
"""
Return help text for collector configuration.
"""
config_help = super(MemoryLxcCollector, self).get_default_config_help()
config_help.update({
"sys_path": "Defaults to '/sys/fs/cgroup/lxc'",
})
return config_help
|
def function[get_default_config_help, parameter[self]]:
constant[
Return help text for collector configuration.
]
variable[config_help] assign[=] call[call[name[super], parameter[name[MemoryLxcCollector], name[self]]].get_default_config_help, parameter[]]
call[name[config_help].update, parameter[dictionary[[<ast.Constant object at 0x7da1b194e260>], [<ast.Constant object at 0x7da1b194e230>]]]]
return[name[config_help]]
|
keyword[def] identifier[get_default_config_help] ( identifier[self] ):
literal[string]
identifier[config_help] = identifier[super] ( identifier[MemoryLxcCollector] , identifier[self] ). identifier[get_default_config_help] ()
identifier[config_help] . identifier[update] ({
literal[string] : literal[string] ,
})
keyword[return] identifier[config_help]
|
def get_default_config_help(self):
"""
Return help text for collector configuration.
"""
config_help = super(MemoryLxcCollector, self).get_default_config_help()
config_help.update({'sys_path': "Defaults to '/sys/fs/cgroup/lxc'"})
return config_help
|
def parse_xhtml_reaction_notes(entry):
"""Return reaction properties defined in the XHTML notes.
Older SBML models often define additional properties in the XHTML notes
section because structured methods for defining properties had not been
developed. This will try to parse the following properties: ``SUBSYSTEM``,
``GENE ASSOCIATION``, ``EC NUMBER``, ``AUTHORS``, ``CONFIDENCE``.
Args:
entry: :class:`SBMLReactionEntry`.
"""
properties = {}
if entry.xml_notes is not None:
cobra_notes = dict(parse_xhtml_notes(entry))
if 'subsystem' in cobra_notes:
properties['subsystem'] = cobra_notes['subsystem']
if 'gene_association' in cobra_notes:
properties['genes'] = cobra_notes['gene_association']
if 'ec_number' in cobra_notes:
properties['ec'] = cobra_notes['ec_number']
if 'authors' in cobra_notes:
properties['authors'] = [
a.strip() for a in cobra_notes['authors'].split(';')]
if 'confidence' in cobra_notes:
try:
value = int(cobra_notes['confidence'])
except ValueError:
logger.warning(
'Unable to parse confidence level for {} as an'
' integer: {}'.format(
entry.id, cobra_notes['confidence']))
value = cobra_notes['confidence']
properties['confidence'] = value
return properties
|
def function[parse_xhtml_reaction_notes, parameter[entry]]:
constant[Return reaction properties defined in the XHTML notes.
Older SBML models often define additional properties in the XHTML notes
section because structured methods for defining properties had not been
developed. This will try to parse the following properties: ``SUBSYSTEM``,
``GENE ASSOCIATION``, ``EC NUMBER``, ``AUTHORS``, ``CONFIDENCE``.
Args:
entry: :class:`SBMLReactionEntry`.
]
variable[properties] assign[=] dictionary[[], []]
if compare[name[entry].xml_notes is_not constant[None]] begin[:]
variable[cobra_notes] assign[=] call[name[dict], parameter[call[name[parse_xhtml_notes], parameter[name[entry]]]]]
if compare[constant[subsystem] in name[cobra_notes]] begin[:]
call[name[properties]][constant[subsystem]] assign[=] call[name[cobra_notes]][constant[subsystem]]
if compare[constant[gene_association] in name[cobra_notes]] begin[:]
call[name[properties]][constant[genes]] assign[=] call[name[cobra_notes]][constant[gene_association]]
if compare[constant[ec_number] in name[cobra_notes]] begin[:]
call[name[properties]][constant[ec]] assign[=] call[name[cobra_notes]][constant[ec_number]]
if compare[constant[authors] in name[cobra_notes]] begin[:]
call[name[properties]][constant[authors]] assign[=] <ast.ListComp object at 0x7da20c76e1d0>
if compare[constant[confidence] in name[cobra_notes]] begin[:]
<ast.Try object at 0x7da20c76c850>
call[name[properties]][constant[confidence]] assign[=] name[value]
return[name[properties]]
|
keyword[def] identifier[parse_xhtml_reaction_notes] ( identifier[entry] ):
literal[string]
identifier[properties] ={}
keyword[if] identifier[entry] . identifier[xml_notes] keyword[is] keyword[not] keyword[None] :
identifier[cobra_notes] = identifier[dict] ( identifier[parse_xhtml_notes] ( identifier[entry] ))
keyword[if] literal[string] keyword[in] identifier[cobra_notes] :
identifier[properties] [ literal[string] ]= identifier[cobra_notes] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[cobra_notes] :
identifier[properties] [ literal[string] ]= identifier[cobra_notes] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[cobra_notes] :
identifier[properties] [ literal[string] ]= identifier[cobra_notes] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[cobra_notes] :
identifier[properties] [ literal[string] ]=[
identifier[a] . identifier[strip] () keyword[for] identifier[a] keyword[in] identifier[cobra_notes] [ literal[string] ]. identifier[split] ( literal[string] )]
keyword[if] literal[string] keyword[in] identifier[cobra_notes] :
keyword[try] :
identifier[value] = identifier[int] ( identifier[cobra_notes] [ literal[string] ])
keyword[except] identifier[ValueError] :
identifier[logger] . identifier[warning] (
literal[string]
literal[string] . identifier[format] (
identifier[entry] . identifier[id] , identifier[cobra_notes] [ literal[string] ]))
identifier[value] = identifier[cobra_notes] [ literal[string] ]
identifier[properties] [ literal[string] ]= identifier[value]
keyword[return] identifier[properties]
|
def parse_xhtml_reaction_notes(entry):
"""Return reaction properties defined in the XHTML notes.
Older SBML models often define additional properties in the XHTML notes
section because structured methods for defining properties had not been
developed. This will try to parse the following properties: ``SUBSYSTEM``,
``GENE ASSOCIATION``, ``EC NUMBER``, ``AUTHORS``, ``CONFIDENCE``.
Args:
entry: :class:`SBMLReactionEntry`.
"""
properties = {}
if entry.xml_notes is not None:
cobra_notes = dict(parse_xhtml_notes(entry))
if 'subsystem' in cobra_notes:
properties['subsystem'] = cobra_notes['subsystem'] # depends on [control=['if'], data=['cobra_notes']]
if 'gene_association' in cobra_notes:
properties['genes'] = cobra_notes['gene_association'] # depends on [control=['if'], data=['cobra_notes']]
if 'ec_number' in cobra_notes:
properties['ec'] = cobra_notes['ec_number'] # depends on [control=['if'], data=['cobra_notes']]
if 'authors' in cobra_notes:
properties['authors'] = [a.strip() for a in cobra_notes['authors'].split(';')] # depends on [control=['if'], data=['cobra_notes']]
if 'confidence' in cobra_notes:
try:
value = int(cobra_notes['confidence']) # depends on [control=['try'], data=[]]
except ValueError:
logger.warning('Unable to parse confidence level for {} as an integer: {}'.format(entry.id, cobra_notes['confidence']))
value = cobra_notes['confidence'] # depends on [control=['except'], data=[]]
properties['confidence'] = value # depends on [control=['if'], data=['cobra_notes']] # depends on [control=['if'], data=[]]
return properties
|
def setActionManifestPath(self, pchActionManifestPath):
"""
Sets the path to the action manifest JSON file that is used by this application. If this information
was set on the Steam partner site, calls to this function are ignored. If the Steam partner site
setting and the path provided by this call are different, VRInputError_MismatchedActionManifest is returned.
This call must be made before the first call to UpdateActionState or IVRSystem::PollNextEvent.
"""
fn = self.function_table.setActionManifestPath
result = fn(pchActionManifestPath)
return result
|
def function[setActionManifestPath, parameter[self, pchActionManifestPath]]:
constant[
Sets the path to the action manifest JSON file that is used by this application. If this information
was set on the Steam partner site, calls to this function are ignored. If the Steam partner site
setting and the path provided by this call are different, VRInputError_MismatchedActionManifest is returned.
This call must be made before the first call to UpdateActionState or IVRSystem::PollNextEvent.
]
variable[fn] assign[=] name[self].function_table.setActionManifestPath
variable[result] assign[=] call[name[fn], parameter[name[pchActionManifestPath]]]
return[name[result]]
|
keyword[def] identifier[setActionManifestPath] ( identifier[self] , identifier[pchActionManifestPath] ):
literal[string]
identifier[fn] = identifier[self] . identifier[function_table] . identifier[setActionManifestPath]
identifier[result] = identifier[fn] ( identifier[pchActionManifestPath] )
keyword[return] identifier[result]
|
def setActionManifestPath(self, pchActionManifestPath):
"""
Sets the path to the action manifest JSON file that is used by this application. If this information
was set on the Steam partner site, calls to this function are ignored. If the Steam partner site
setting and the path provided by this call are different, VRInputError_MismatchedActionManifest is returned.
This call must be made before the first call to UpdateActionState or IVRSystem::PollNextEvent.
"""
fn = self.function_table.setActionManifestPath
result = fn(pchActionManifestPath)
return result
|
def valid_hotp(self, code, last=0, trials=100):
"""Valid a HOTP code.
:param code: A number that is less than 6 characters.
:param last: Guess HOTP code from last + 1 range.
:param trials: Guest HOTP code end at last + trials + 1.
"""
if not valid_code(code):
return False
code = bytes(int(code))
for i in range(last + 1, last + trials + 1):
if compare_digest(bytes(self.hotp(counter=i)), code):
return i
return False
|
def function[valid_hotp, parameter[self, code, last, trials]]:
constant[Valid a HOTP code.
:param code: A number that is less than 6 characters.
:param last: Guess HOTP code from last + 1 range.
:param trials: Guest HOTP code end at last + trials + 1.
]
if <ast.UnaryOp object at 0x7da18f09ffa0> begin[:]
return[constant[False]]
variable[code] assign[=] call[name[bytes], parameter[call[name[int], parameter[name[code]]]]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[last] + constant[1]], binary_operation[binary_operation[name[last] + name[trials]] + constant[1]]]]] begin[:]
if call[name[compare_digest], parameter[call[name[bytes], parameter[call[name[self].hotp, parameter[]]]], name[code]]] begin[:]
return[name[i]]
return[constant[False]]
|
keyword[def] identifier[valid_hotp] ( identifier[self] , identifier[code] , identifier[last] = literal[int] , identifier[trials] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[valid_code] ( identifier[code] ):
keyword[return] keyword[False]
identifier[code] = identifier[bytes] ( identifier[int] ( identifier[code] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[last] + literal[int] , identifier[last] + identifier[trials] + literal[int] ):
keyword[if] identifier[compare_digest] ( identifier[bytes] ( identifier[self] . identifier[hotp] ( identifier[counter] = identifier[i] )), identifier[code] ):
keyword[return] identifier[i]
keyword[return] keyword[False]
|
def valid_hotp(self, code, last=0, trials=100):
"""Valid a HOTP code.
:param code: A number that is less than 6 characters.
:param last: Guess HOTP code from last + 1 range.
:param trials: Guest HOTP code end at last + trials + 1.
"""
if not valid_code(code):
return False # depends on [control=['if'], data=[]]
code = bytes(int(code))
for i in range(last + 1, last + trials + 1):
if compare_digest(bytes(self.hotp(counter=i)), code):
return i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return False
|
def process_results(self, paragraph):
"""Routes Zeppelin output types to corresponding handlers."""
if 'editorMode' in paragraph['config']:
mode = paragraph['config']['editorMode'].split('/')[-1]
if 'results' in paragraph and paragraph['results']['msg']:
msg = paragraph['results']['msg'][0]
if mode not in ('text', 'markdown'):
self.output_options[msg['type']](msg['data'])
|
def function[process_results, parameter[self, paragraph]]:
constant[Routes Zeppelin output types to corresponding handlers.]
if compare[constant[editorMode] in call[name[paragraph]][constant[config]]] begin[:]
variable[mode] assign[=] call[call[call[call[name[paragraph]][constant[config]]][constant[editorMode]].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da20c76d150>]
if <ast.BoolOp object at 0x7da20c76f250> begin[:]
variable[msg] assign[=] call[call[call[name[paragraph]][constant[results]]][constant[msg]]][constant[0]]
if compare[name[mode] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c796620>, <ast.Constant object at 0x7da20c795cc0>]]] begin[:]
call[call[name[self].output_options][call[name[msg]][constant[type]]], parameter[call[name[msg]][constant[data]]]]
|
keyword[def] identifier[process_results] ( identifier[self] , identifier[paragraph] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[paragraph] [ literal[string] ]:
identifier[mode] = identifier[paragraph] [ literal[string] ][ literal[string] ]. identifier[split] ( literal[string] )[- literal[int] ]
keyword[if] literal[string] keyword[in] identifier[paragraph] keyword[and] identifier[paragraph] [ literal[string] ][ literal[string] ]:
identifier[msg] = identifier[paragraph] [ literal[string] ][ literal[string] ][ literal[int] ]
keyword[if] identifier[mode] keyword[not] keyword[in] ( literal[string] , literal[string] ):
identifier[self] . identifier[output_options] [ identifier[msg] [ literal[string] ]]( identifier[msg] [ literal[string] ])
|
def process_results(self, paragraph):
"""Routes Zeppelin output types to corresponding handlers."""
if 'editorMode' in paragraph['config']:
mode = paragraph['config']['editorMode'].split('/')[-1]
if 'results' in paragraph and paragraph['results']['msg']:
msg = paragraph['results']['msg'][0]
if mode not in ('text', 'markdown'):
self.output_options[msg['type']](msg['data']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
|
def get_wfsmat(l):
'''
l = ['v_7', 'v_3', 'v_1', 'v_4', ['v_4', 'v_2'], 'v_5', 'v_6', 'v_1', 'v_6', 'v_7', 'v_5', ['v_4', ['v_1', 'v_8', 'v_3', 'v_4', 'v_2', 'v_7', [['v_3', 'v_2'], 'v_4', 'v_5', 'v_1', 'v_3', 'v_1', 'v_2', 'v_5', 'v_8', 'v_8', 'v_7'], 'v_5', 'v_8', 'v_7', 'v_1', 'v_5'], 'v_6'], 'v_4', 'v_5', 'v_8', 'v_5']
get_wfs(l)
'''
ltree = ListTree(l)
vdescmat = ltree.desc
wfsmat = matrix_map(vdescmat,lambda v,ix,iy:v['path'])
wfsmat.pop(0)
return(wfsmat)
|
def function[get_wfsmat, parameter[l]]:
constant[
l = ['v_7', 'v_3', 'v_1', 'v_4', ['v_4', 'v_2'], 'v_5', 'v_6', 'v_1', 'v_6', 'v_7', 'v_5', ['v_4', ['v_1', 'v_8', 'v_3', 'v_4', 'v_2', 'v_7', [['v_3', 'v_2'], 'v_4', 'v_5', 'v_1', 'v_3', 'v_1', 'v_2', 'v_5', 'v_8', 'v_8', 'v_7'], 'v_5', 'v_8', 'v_7', 'v_1', 'v_5'], 'v_6'], 'v_4', 'v_5', 'v_8', 'v_5']
get_wfs(l)
]
variable[ltree] assign[=] call[name[ListTree], parameter[name[l]]]
variable[vdescmat] assign[=] name[ltree].desc
variable[wfsmat] assign[=] call[name[matrix_map], parameter[name[vdescmat], <ast.Lambda object at 0x7da1affee5c0>]]
call[name[wfsmat].pop, parameter[constant[0]]]
return[name[wfsmat]]
|
keyword[def] identifier[get_wfsmat] ( identifier[l] ):
literal[string]
identifier[ltree] = identifier[ListTree] ( identifier[l] )
identifier[vdescmat] = identifier[ltree] . identifier[desc]
identifier[wfsmat] = identifier[matrix_map] ( identifier[vdescmat] , keyword[lambda] identifier[v] , identifier[ix] , identifier[iy] : identifier[v] [ literal[string] ])
identifier[wfsmat] . identifier[pop] ( literal[int] )
keyword[return] ( identifier[wfsmat] )
|
def get_wfsmat(l):
"""
l = ['v_7', 'v_3', 'v_1', 'v_4', ['v_4', 'v_2'], 'v_5', 'v_6', 'v_1', 'v_6', 'v_7', 'v_5', ['v_4', ['v_1', 'v_8', 'v_3', 'v_4', 'v_2', 'v_7', [['v_3', 'v_2'], 'v_4', 'v_5', 'v_1', 'v_3', 'v_1', 'v_2', 'v_5', 'v_8', 'v_8', 'v_7'], 'v_5', 'v_8', 'v_7', 'v_1', 'v_5'], 'v_6'], 'v_4', 'v_5', 'v_8', 'v_5']
get_wfs(l)
"""
ltree = ListTree(l)
vdescmat = ltree.desc
wfsmat = matrix_map(vdescmat, lambda v, ix, iy: v['path'])
wfsmat.pop(0)
return wfsmat
|
def workspace_cli(ctx, directory, mets_basename, backup):
"""
Working with workspace
"""
ctx.obj = WorkspaceCtx(os.path.abspath(directory), mets_basename, automatic_backup=backup)
|
def function[workspace_cli, parameter[ctx, directory, mets_basename, backup]]:
constant[
Working with workspace
]
name[ctx].obj assign[=] call[name[WorkspaceCtx], parameter[call[name[os].path.abspath, parameter[name[directory]]], name[mets_basename]]]
|
keyword[def] identifier[workspace_cli] ( identifier[ctx] , identifier[directory] , identifier[mets_basename] , identifier[backup] ):
literal[string]
identifier[ctx] . identifier[obj] = identifier[WorkspaceCtx] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[directory] ), identifier[mets_basename] , identifier[automatic_backup] = identifier[backup] )
|
def workspace_cli(ctx, directory, mets_basename, backup):
"""
Working with workspace
"""
ctx.obj = WorkspaceCtx(os.path.abspath(directory), mets_basename, automatic_backup=backup)
|
def update(self, entry_id, new_label):
"""
Updates an entry with entry_id with the given label
Parameters
----------
entry_id : int
entry id of the sample to update.
label : {int, None}
Label of the sample to be update.
"""
self.data[entry_id] = (self.data[entry_id][0], new_label)
self.modified = True
for callback in self._update_callback:
callback(entry_id, new_label)
|
def function[update, parameter[self, entry_id, new_label]]:
constant[
Updates an entry with entry_id with the given label
Parameters
----------
entry_id : int
entry id of the sample to update.
label : {int, None}
Label of the sample to be update.
]
call[name[self].data][name[entry_id]] assign[=] tuple[[<ast.Subscript object at 0x7da1b08e7010>, <ast.Name object at 0x7da1b08e78b0>]]
name[self].modified assign[=] constant[True]
for taget[name[callback]] in starred[name[self]._update_callback] begin[:]
call[name[callback], parameter[name[entry_id], name[new_label]]]
|
keyword[def] identifier[update] ( identifier[self] , identifier[entry_id] , identifier[new_label] ):
literal[string]
identifier[self] . identifier[data] [ identifier[entry_id] ]=( identifier[self] . identifier[data] [ identifier[entry_id] ][ literal[int] ], identifier[new_label] )
identifier[self] . identifier[modified] = keyword[True]
keyword[for] identifier[callback] keyword[in] identifier[self] . identifier[_update_callback] :
identifier[callback] ( identifier[entry_id] , identifier[new_label] )
|
def update(self, entry_id, new_label):
"""
Updates an entry with entry_id with the given label
Parameters
----------
entry_id : int
entry id of the sample to update.
label : {int, None}
Label of the sample to be update.
"""
self.data[entry_id] = (self.data[entry_id][0], new_label)
self.modified = True
for callback in self._update_callback:
callback(entry_id, new_label) # depends on [control=['for'], data=['callback']]
|
def main():
""" Reports stats on the workflow, use with --stats option to toil.
"""
parser = getBasicOptionParser()
initializeOptions(parser)
options = parseBasicOptions(parser)
checkOptions(options, parser)
config = Config()
config.setOptions(options)
jobStore = Toil.resumeJobStore(config.jobStore)
stats = getStats(jobStore)
collatedStatsTag = processData(jobStore.config, stats)
reportData(collatedStatsTag, options)
|
def function[main, parameter[]]:
constant[ Reports stats on the workflow, use with --stats option to toil.
]
variable[parser] assign[=] call[name[getBasicOptionParser], parameter[]]
call[name[initializeOptions], parameter[name[parser]]]
variable[options] assign[=] call[name[parseBasicOptions], parameter[name[parser]]]
call[name[checkOptions], parameter[name[options], name[parser]]]
variable[config] assign[=] call[name[Config], parameter[]]
call[name[config].setOptions, parameter[name[options]]]
variable[jobStore] assign[=] call[name[Toil].resumeJobStore, parameter[name[config].jobStore]]
variable[stats] assign[=] call[name[getStats], parameter[name[jobStore]]]
variable[collatedStatsTag] assign[=] call[name[processData], parameter[name[jobStore].config, name[stats]]]
call[name[reportData], parameter[name[collatedStatsTag], name[options]]]
|
keyword[def] identifier[main] ():
literal[string]
identifier[parser] = identifier[getBasicOptionParser] ()
identifier[initializeOptions] ( identifier[parser] )
identifier[options] = identifier[parseBasicOptions] ( identifier[parser] )
identifier[checkOptions] ( identifier[options] , identifier[parser] )
identifier[config] = identifier[Config] ()
identifier[config] . identifier[setOptions] ( identifier[options] )
identifier[jobStore] = identifier[Toil] . identifier[resumeJobStore] ( identifier[config] . identifier[jobStore] )
identifier[stats] = identifier[getStats] ( identifier[jobStore] )
identifier[collatedStatsTag] = identifier[processData] ( identifier[jobStore] . identifier[config] , identifier[stats] )
identifier[reportData] ( identifier[collatedStatsTag] , identifier[options] )
|
def main():
""" Reports stats on the workflow, use with --stats option to toil.
"""
parser = getBasicOptionParser()
initializeOptions(parser)
options = parseBasicOptions(parser)
checkOptions(options, parser)
config = Config()
config.setOptions(options)
jobStore = Toil.resumeJobStore(config.jobStore)
stats = getStats(jobStore)
collatedStatsTag = processData(jobStore.config, stats)
reportData(collatedStatsTag, options)
|
def numpy_binning(data, bins=10, range=None, *args, **kwargs) -> NumpyBinning:
"""Construct binning schema compatible with numpy.histogram
Parameters
----------
data: array_like, optional
This is optional if both bins and range are set
bins: int or array_like
range: Optional[tuple]
(min, max)
includes_right_edge: Optional[bool]
default: True
See Also
--------
numpy.histogram
"""
if isinstance(bins, int):
if range:
bins = np.linspace(range[0], range[1], bins + 1)
else:
start = data.min()
stop = data.max()
bins = np.linspace(start, stop, bins + 1)
elif np.iterable(bins):
bins = np.asarray(bins)
else:
# Some numpy edge case
_, bins = np.histogram(data, bins, **kwargs)
return NumpyBinning(bins)
|
def function[numpy_binning, parameter[data, bins, range]]:
constant[Construct binning schema compatible with numpy.histogram
Parameters
----------
data: array_like, optional
This is optional if both bins and range are set
bins: int or array_like
range: Optional[tuple]
(min, max)
includes_right_edge: Optional[bool]
default: True
See Also
--------
numpy.histogram
]
if call[name[isinstance], parameter[name[bins], name[int]]] begin[:]
if name[range] begin[:]
variable[bins] assign[=] call[name[np].linspace, parameter[call[name[range]][constant[0]], call[name[range]][constant[1]], binary_operation[name[bins] + constant[1]]]]
return[call[name[NumpyBinning], parameter[name[bins]]]]
|
keyword[def] identifier[numpy_binning] ( identifier[data] , identifier[bins] = literal[int] , identifier[range] = keyword[None] ,* identifier[args] ,** identifier[kwargs] )-> identifier[NumpyBinning] :
literal[string]
keyword[if] identifier[isinstance] ( identifier[bins] , identifier[int] ):
keyword[if] identifier[range] :
identifier[bins] = identifier[np] . identifier[linspace] ( identifier[range] [ literal[int] ], identifier[range] [ literal[int] ], identifier[bins] + literal[int] )
keyword[else] :
identifier[start] = identifier[data] . identifier[min] ()
identifier[stop] = identifier[data] . identifier[max] ()
identifier[bins] = identifier[np] . identifier[linspace] ( identifier[start] , identifier[stop] , identifier[bins] + literal[int] )
keyword[elif] identifier[np] . identifier[iterable] ( identifier[bins] ):
identifier[bins] = identifier[np] . identifier[asarray] ( identifier[bins] )
keyword[else] :
identifier[_] , identifier[bins] = identifier[np] . identifier[histogram] ( identifier[data] , identifier[bins] ,** identifier[kwargs] )
keyword[return] identifier[NumpyBinning] ( identifier[bins] )
|
def numpy_binning(data, bins=10, range=None, *args, **kwargs) -> NumpyBinning:
"""Construct binning schema compatible with numpy.histogram
Parameters
----------
data: array_like, optional
This is optional if both bins and range are set
bins: int or array_like
range: Optional[tuple]
(min, max)
includes_right_edge: Optional[bool]
default: True
See Also
--------
numpy.histogram
"""
if isinstance(bins, int):
if range:
bins = np.linspace(range[0], range[1], bins + 1) # depends on [control=['if'], data=[]]
else:
start = data.min()
stop = data.max()
bins = np.linspace(start, stop, bins + 1) # depends on [control=['if'], data=[]]
elif np.iterable(bins):
bins = np.asarray(bins) # depends on [control=['if'], data=[]]
else:
# Some numpy edge case
(_, bins) = np.histogram(data, bins, **kwargs)
return NumpyBinning(bins)
|
def scalars_route(self, request):
"""Given a tag and single run, return array of ScalarEvents."""
# TODO: return HTTP status code for malformed requests
tag = request.args.get('tag')
run = request.args.get('run')
experiment = request.args.get('experiment')
output_format = request.args.get('format')
(body, mime_type) = self.scalars_impl(tag, run, experiment, output_format)
return http_util.Respond(request, body, mime_type)
|
def function[scalars_route, parameter[self, request]]:
constant[Given a tag and single run, return array of ScalarEvents.]
variable[tag] assign[=] call[name[request].args.get, parameter[constant[tag]]]
variable[run] assign[=] call[name[request].args.get, parameter[constant[run]]]
variable[experiment] assign[=] call[name[request].args.get, parameter[constant[experiment]]]
variable[output_format] assign[=] call[name[request].args.get, parameter[constant[format]]]
<ast.Tuple object at 0x7da1b21ce380> assign[=] call[name[self].scalars_impl, parameter[name[tag], name[run], name[experiment], name[output_format]]]
return[call[name[http_util].Respond, parameter[name[request], name[body], name[mime_type]]]]
|
keyword[def] identifier[scalars_route] ( identifier[self] , identifier[request] ):
literal[string]
identifier[tag] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
identifier[run] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
identifier[experiment] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
identifier[output_format] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
( identifier[body] , identifier[mime_type] )= identifier[self] . identifier[scalars_impl] ( identifier[tag] , identifier[run] , identifier[experiment] , identifier[output_format] )
keyword[return] identifier[http_util] . identifier[Respond] ( identifier[request] , identifier[body] , identifier[mime_type] )
|
def scalars_route(self, request):
"""Given a tag and single run, return array of ScalarEvents."""
# TODO: return HTTP status code for malformed requests
tag = request.args.get('tag')
run = request.args.get('run')
experiment = request.args.get('experiment')
output_format = request.args.get('format')
(body, mime_type) = self.scalars_impl(tag, run, experiment, output_format)
return http_util.Respond(request, body, mime_type)
|
def lazy_elemwise_func(array, func, dtype):
"""Lazily apply an element-wise function to an array.
Parameters
----------
array : any valid value of Variable._data
func : callable
Function to apply to indexed slices of an array. For use with dask,
this should be a pickle-able object.
dtype : coercible to np.dtype
Dtype for the result of this function.
Returns
-------
Either a dask.array.Array or _ElementwiseFunctionArray.
"""
if isinstance(array, dask_array_type):
return array.map_blocks(func, dtype=dtype)
else:
return _ElementwiseFunctionArray(array, func, dtype)
|
def function[lazy_elemwise_func, parameter[array, func, dtype]]:
constant[Lazily apply an element-wise function to an array.
Parameters
----------
array : any valid value of Variable._data
func : callable
Function to apply to indexed slices of an array. For use with dask,
this should be a pickle-able object.
dtype : coercible to np.dtype
Dtype for the result of this function.
Returns
-------
Either a dask.array.Array or _ElementwiseFunctionArray.
]
if call[name[isinstance], parameter[name[array], name[dask_array_type]]] begin[:]
return[call[name[array].map_blocks, parameter[name[func]]]]
|
keyword[def] identifier[lazy_elemwise_func] ( identifier[array] , identifier[func] , identifier[dtype] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[array] , identifier[dask_array_type] ):
keyword[return] identifier[array] . identifier[map_blocks] ( identifier[func] , identifier[dtype] = identifier[dtype] )
keyword[else] :
keyword[return] identifier[_ElementwiseFunctionArray] ( identifier[array] , identifier[func] , identifier[dtype] )
|
def lazy_elemwise_func(array, func, dtype):
"""Lazily apply an element-wise function to an array.
Parameters
----------
array : any valid value of Variable._data
func : callable
Function to apply to indexed slices of an array. For use with dask,
this should be a pickle-able object.
dtype : coercible to np.dtype
Dtype for the result of this function.
Returns
-------
Either a dask.array.Array or _ElementwiseFunctionArray.
"""
if isinstance(array, dask_array_type):
return array.map_blocks(func, dtype=dtype) # depends on [control=['if'], data=[]]
else:
return _ElementwiseFunctionArray(array, func, dtype)
|
def get_logfile_name(tags):
"""Formulates a log file name that incorporates the provided tags.
The log file will be located in ``scgpm_seqresults_dnanexus.LOG_DIR``.
Args:
tags: `list` of tags to append to the log file name. Each tag will be '_' delimited. Each tag
will be added in the same order as provided.
"""
if not os.path.exists(sd.LOG_DIR):
os.mkdir(sd.LOG_DIR)
filename = "log"
for tag in tags:
filename += "_{}".format(tag)
filename += ".txt"
filename = os.path.join(sd.LOG_DIR,filename)
return filename
|
def function[get_logfile_name, parameter[tags]]:
constant[Formulates a log file name that incorporates the provided tags.
The log file will be located in ``scgpm_seqresults_dnanexus.LOG_DIR``.
Args:
tags: `list` of tags to append to the log file name. Each tag will be '_' delimited. Each tag
will be added in the same order as provided.
]
if <ast.UnaryOp object at 0x7da1b1342560> begin[:]
call[name[os].mkdir, parameter[name[sd].LOG_DIR]]
variable[filename] assign[=] constant[log]
for taget[name[tag]] in starred[name[tags]] begin[:]
<ast.AugAssign object at 0x7da1b13410f0>
<ast.AugAssign object at 0x7da1b1343df0>
variable[filename] assign[=] call[name[os].path.join, parameter[name[sd].LOG_DIR, name[filename]]]
return[name[filename]]
|
keyword[def] identifier[get_logfile_name] ( identifier[tags] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[sd] . identifier[LOG_DIR] ):
identifier[os] . identifier[mkdir] ( identifier[sd] . identifier[LOG_DIR] )
identifier[filename] = literal[string]
keyword[for] identifier[tag] keyword[in] identifier[tags] :
identifier[filename] += literal[string] . identifier[format] ( identifier[tag] )
identifier[filename] += literal[string]
identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[sd] . identifier[LOG_DIR] , identifier[filename] )
keyword[return] identifier[filename]
|
def get_logfile_name(tags):
"""Formulates a log file name that incorporates the provided tags.
The log file will be located in ``scgpm_seqresults_dnanexus.LOG_DIR``.
Args:
tags: `list` of tags to append to the log file name. Each tag will be '_' delimited. Each tag
will be added in the same order as provided.
"""
if not os.path.exists(sd.LOG_DIR):
os.mkdir(sd.LOG_DIR) # depends on [control=['if'], data=[]]
filename = 'log'
for tag in tags:
filename += '_{}'.format(tag) # depends on [control=['for'], data=['tag']]
filename += '.txt'
filename = os.path.join(sd.LOG_DIR, filename)
return filename
|
def delete(self, bundleId):
"""
Delete a device management extension package
It accepts bundleId (string) as parameters
In case of failure it throws APIException
"""
url = "api/v0002/mgmt/custom/bundle/%s" % (bundleId)
r = self._apiClient.delete(url)
if r.status_code == 204:
return True
else:
raise ApiException(r)
|
def function[delete, parameter[self, bundleId]]:
constant[
Delete a device management extension package
It accepts bundleId (string) as parameters
In case of failure it throws APIException
]
variable[url] assign[=] binary_operation[constant[api/v0002/mgmt/custom/bundle/%s] <ast.Mod object at 0x7da2590d6920> name[bundleId]]
variable[r] assign[=] call[name[self]._apiClient.delete, parameter[name[url]]]
if compare[name[r].status_code equal[==] constant[204]] begin[:]
return[constant[True]]
|
keyword[def] identifier[delete] ( identifier[self] , identifier[bundleId] ):
literal[string]
identifier[url] = literal[string] %( identifier[bundleId] )
identifier[r] = identifier[self] . identifier[_apiClient] . identifier[delete] ( identifier[url] )
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
keyword[return] keyword[True]
keyword[else] :
keyword[raise] identifier[ApiException] ( identifier[r] )
|
def delete(self, bundleId):
"""
Delete a device management extension package
It accepts bundleId (string) as parameters
In case of failure it throws APIException
"""
url = 'api/v0002/mgmt/custom/bundle/%s' % bundleId
r = self._apiClient.delete(url)
if r.status_code == 204:
return True # depends on [control=['if'], data=[]]
else:
raise ApiException(r)
|
def _get_platform_name(ncattr):
"""Determine name of the platform"""
match = re.match(r'G-(\d+)', ncattr)
if match:
return SPACECRAFTS.get(int(match.groups()[0]))
return None
|
def function[_get_platform_name, parameter[ncattr]]:
constant[Determine name of the platform]
variable[match] assign[=] call[name[re].match, parameter[constant[G-(\d+)], name[ncattr]]]
if name[match] begin[:]
return[call[name[SPACECRAFTS].get, parameter[call[name[int], parameter[call[call[name[match].groups, parameter[]]][constant[0]]]]]]]
return[constant[None]]
|
keyword[def] identifier[_get_platform_name] ( identifier[ncattr] ):
literal[string]
identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[ncattr] )
keyword[if] identifier[match] :
keyword[return] identifier[SPACECRAFTS] . identifier[get] ( identifier[int] ( identifier[match] . identifier[groups] ()[ literal[int] ]))
keyword[return] keyword[None]
|
def _get_platform_name(ncattr):
"""Determine name of the platform"""
match = re.match('G-(\\d+)', ncattr)
if match:
return SPACECRAFTS.get(int(match.groups()[0])) # depends on [control=['if'], data=[]]
return None
|
def _variants_dtype(fields, dtypes, arities, filter_ids, flatten_filter,
info_types):
"""Utility function to build a numpy dtype for a variants array,
given user arguments and information available from VCF header."""
dtype = list()
for f, n, vcf_type in zip(fields, arities, info_types):
if f == 'FILTER' and flatten_filter:
# split FILTER into multiple boolean fields
for flt in filter_ids:
nm = 'FILTER_' + flt
dtype.append((nm, 'b1'))
elif f == 'FILTER' and not flatten_filter:
# represent FILTER as a structured field
t = [(flt, 'b1') for flt in filter_ids]
dtype.append((f, t))
else:
if dtypes is not None and f in dtypes:
# user overrides default dtype
t = dtypes[f]
elif f in config.STANDARD_VARIANT_FIELDS:
t = config.DEFAULT_VARIANT_DTYPE[f]
elif f in config.DEFAULT_INFO_DTYPE:
# known INFO field
t = config.DEFAULT_INFO_DTYPE[f]
else:
t = config.DEFAULT_TYPE_MAP[vcf_type]
# deal with arity
if n == 1:
dtype.append((f, t))
else:
dtype.append((f, t, (n,)))
return dtype
|
def function[_variants_dtype, parameter[fields, dtypes, arities, filter_ids, flatten_filter, info_types]]:
constant[Utility function to build a numpy dtype for a variants array,
given user arguments and information available from VCF header.]
variable[dtype] assign[=] call[name[list], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b25624d0>, <ast.Name object at 0x7da1b25617b0>, <ast.Name object at 0x7da1b2562710>]]] in starred[call[name[zip], parameter[name[fields], name[arities], name[info_types]]]] begin[:]
if <ast.BoolOp object at 0x7da1b2560760> begin[:]
for taget[name[flt]] in starred[name[filter_ids]] begin[:]
variable[nm] assign[=] binary_operation[constant[FILTER_] + name[flt]]
call[name[dtype].append, parameter[tuple[[<ast.Name object at 0x7da1b2560d30>, <ast.Constant object at 0x7da1b2561e70>]]]]
return[name[dtype]]
|
keyword[def] identifier[_variants_dtype] ( identifier[fields] , identifier[dtypes] , identifier[arities] , identifier[filter_ids] , identifier[flatten_filter] ,
identifier[info_types] ):
literal[string]
identifier[dtype] = identifier[list] ()
keyword[for] identifier[f] , identifier[n] , identifier[vcf_type] keyword[in] identifier[zip] ( identifier[fields] , identifier[arities] , identifier[info_types] ):
keyword[if] identifier[f] == literal[string] keyword[and] identifier[flatten_filter] :
keyword[for] identifier[flt] keyword[in] identifier[filter_ids] :
identifier[nm] = literal[string] + identifier[flt]
identifier[dtype] . identifier[append] (( identifier[nm] , literal[string] ))
keyword[elif] identifier[f] == literal[string] keyword[and] keyword[not] identifier[flatten_filter] :
identifier[t] =[( identifier[flt] , literal[string] ) keyword[for] identifier[flt] keyword[in] identifier[filter_ids] ]
identifier[dtype] . identifier[append] (( identifier[f] , identifier[t] ))
keyword[else] :
keyword[if] identifier[dtypes] keyword[is] keyword[not] keyword[None] keyword[and] identifier[f] keyword[in] identifier[dtypes] :
identifier[t] = identifier[dtypes] [ identifier[f] ]
keyword[elif] identifier[f] keyword[in] identifier[config] . identifier[STANDARD_VARIANT_FIELDS] :
identifier[t] = identifier[config] . identifier[DEFAULT_VARIANT_DTYPE] [ identifier[f] ]
keyword[elif] identifier[f] keyword[in] identifier[config] . identifier[DEFAULT_INFO_DTYPE] :
identifier[t] = identifier[config] . identifier[DEFAULT_INFO_DTYPE] [ identifier[f] ]
keyword[else] :
identifier[t] = identifier[config] . identifier[DEFAULT_TYPE_MAP] [ identifier[vcf_type] ]
keyword[if] identifier[n] == literal[int] :
identifier[dtype] . identifier[append] (( identifier[f] , identifier[t] ))
keyword[else] :
identifier[dtype] . identifier[append] (( identifier[f] , identifier[t] ,( identifier[n] ,)))
keyword[return] identifier[dtype]
|
def _variants_dtype(fields, dtypes, arities, filter_ids, flatten_filter, info_types):
"""Utility function to build a numpy dtype for a variants array,
given user arguments and information available from VCF header."""
dtype = list()
for (f, n, vcf_type) in zip(fields, arities, info_types):
if f == 'FILTER' and flatten_filter:
# split FILTER into multiple boolean fields
for flt in filter_ids:
nm = 'FILTER_' + flt
dtype.append((nm, 'b1')) # depends on [control=['for'], data=['flt']] # depends on [control=['if'], data=[]]
elif f == 'FILTER' and (not flatten_filter):
# represent FILTER as a structured field
t = [(flt, 'b1') for flt in filter_ids]
dtype.append((f, t)) # depends on [control=['if'], data=[]]
else:
if dtypes is not None and f in dtypes:
# user overrides default dtype
t = dtypes[f] # depends on [control=['if'], data=[]]
elif f in config.STANDARD_VARIANT_FIELDS:
t = config.DEFAULT_VARIANT_DTYPE[f] # depends on [control=['if'], data=['f']]
elif f in config.DEFAULT_INFO_DTYPE:
# known INFO field
t = config.DEFAULT_INFO_DTYPE[f] # depends on [control=['if'], data=['f']]
else:
t = config.DEFAULT_TYPE_MAP[vcf_type]
# deal with arity
if n == 1:
dtype.append((f, t)) # depends on [control=['if'], data=[]]
else:
dtype.append((f, t, (n,))) # depends on [control=['for'], data=[]]
return dtype
|
def plot(self, sizescale=10, color=None, alpha=0.5, label=None, edgecolor='none', **kw):
'''
Plot the ra and dec of the coordinates,
at a given epoch, scaled by their magnitude.
(This does *not* create a new empty figure.)
Parameters
----------
sizescale : (optional) float
The marker size for scatter for a star at the magnitudelimit.
color : (optional) any valid color
The color to plot (but there is a default for this catalog.)
**kw : dict
Additional keywords will be passed on to plt.scatter.
Returns
-------
plotted : outputs from the plots
'''
# calculate the sizes of the stars (logarithmic with brightness?)
size = np.maximum(sizescale*(1 + self.magnitudelimit - self.magnitude), 1)
# make a scatter plot of the RA + Dec
scatter = plt.scatter(self.ra, self.dec,
s=size,
color=color or self.color,
label=label or '{} ({:.1f})'.format(self.name, self.epoch),
alpha=alpha,
edgecolor=edgecolor,
**kw)
return scatter
|
def function[plot, parameter[self, sizescale, color, alpha, label, edgecolor]]:
constant[
Plot the ra and dec of the coordinates,
at a given epoch, scaled by their magnitude.
(This does *not* create a new empty figure.)
Parameters
----------
sizescale : (optional) float
The marker size for scatter for a star at the magnitudelimit.
color : (optional) any valid color
The color to plot (but there is a default for this catalog.)
**kw : dict
Additional keywords will be passed on to plt.scatter.
Returns
-------
plotted : outputs from the plots
]
variable[size] assign[=] call[name[np].maximum, parameter[binary_operation[name[sizescale] * binary_operation[binary_operation[constant[1] + name[self].magnitudelimit] - name[self].magnitude]], constant[1]]]
variable[scatter] assign[=] call[name[plt].scatter, parameter[name[self].ra, name[self].dec]]
return[name[scatter]]
|
keyword[def] identifier[plot] ( identifier[self] , identifier[sizescale] = literal[int] , identifier[color] = keyword[None] , identifier[alpha] = literal[int] , identifier[label] = keyword[None] , identifier[edgecolor] = literal[string] ,** identifier[kw] ):
literal[string]
identifier[size] = identifier[np] . identifier[maximum] ( identifier[sizescale] *( literal[int] + identifier[self] . identifier[magnitudelimit] - identifier[self] . identifier[magnitude] ), literal[int] )
identifier[scatter] = identifier[plt] . identifier[scatter] ( identifier[self] . identifier[ra] , identifier[self] . identifier[dec] ,
identifier[s] = identifier[size] ,
identifier[color] = identifier[color] keyword[or] identifier[self] . identifier[color] ,
identifier[label] = identifier[label] keyword[or] literal[string] . identifier[format] ( identifier[self] . identifier[name] , identifier[self] . identifier[epoch] ),
identifier[alpha] = identifier[alpha] ,
identifier[edgecolor] = identifier[edgecolor] ,
** identifier[kw] )
keyword[return] identifier[scatter]
|
def plot(self, sizescale=10, color=None, alpha=0.5, label=None, edgecolor='none', **kw):
"""
Plot the ra and dec of the coordinates,
at a given epoch, scaled by their magnitude.
(This does *not* create a new empty figure.)
Parameters
----------
sizescale : (optional) float
The marker size for scatter for a star at the magnitudelimit.
color : (optional) any valid color
The color to plot (but there is a default for this catalog.)
**kw : dict
Additional keywords will be passed on to plt.scatter.
Returns
-------
plotted : outputs from the plots
"""
# calculate the sizes of the stars (logarithmic with brightness?)
size = np.maximum(sizescale * (1 + self.magnitudelimit - self.magnitude), 1)
# make a scatter plot of the RA + Dec
scatter = plt.scatter(self.ra, self.dec, s=size, color=color or self.color, label=label or '{} ({:.1f})'.format(self.name, self.epoch), alpha=alpha, edgecolor=edgecolor, **kw)
return scatter
|
def _cursor(self, *args, **kwargs):
"""A "tough" version of the method cursor()."""
# The args and kwargs are not part of the standard,
# but some database modules seem to use these.
transaction = self._transaction
if not transaction:
self._ping_check(2)
try:
if self._maxusage:
if self._usage >= self._maxusage:
# the connection was used too often
raise self._failure
cursor = self._con.cursor(*args, **kwargs) # try to get a cursor
except self._failures as error: # error in getting cursor
try: # try to reopen the connection
con = self._create()
except Exception:
pass
else:
try: # and try one more time to get a cursor
cursor = con.cursor(*args, **kwargs)
except Exception:
pass
else:
self._close()
self._store(con)
if transaction:
raise error # re-raise the original error again
return cursor
try:
con.close()
except Exception:
pass
if transaction:
self._transaction = False
raise error # re-raise the original error again
return cursor
|
def function[_cursor, parameter[self]]:
constant[A "tough" version of the method cursor().]
variable[transaction] assign[=] name[self]._transaction
if <ast.UnaryOp object at 0x7da20c993820> begin[:]
call[name[self]._ping_check, parameter[constant[2]]]
<ast.Try object at 0x7da20c993c10>
return[name[cursor]]
|
keyword[def] identifier[_cursor] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[transaction] = identifier[self] . identifier[_transaction]
keyword[if] keyword[not] identifier[transaction] :
identifier[self] . identifier[_ping_check] ( literal[int] )
keyword[try] :
keyword[if] identifier[self] . identifier[_maxusage] :
keyword[if] identifier[self] . identifier[_usage] >= identifier[self] . identifier[_maxusage] :
keyword[raise] identifier[self] . identifier[_failure]
identifier[cursor] = identifier[self] . identifier[_con] . identifier[cursor] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[self] . identifier[_failures] keyword[as] identifier[error] :
keyword[try] :
identifier[con] = identifier[self] . identifier[_create] ()
keyword[except] identifier[Exception] :
keyword[pass]
keyword[else] :
keyword[try] :
identifier[cursor] = identifier[con] . identifier[cursor] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[Exception] :
keyword[pass]
keyword[else] :
identifier[self] . identifier[_close] ()
identifier[self] . identifier[_store] ( identifier[con] )
keyword[if] identifier[transaction] :
keyword[raise] identifier[error]
keyword[return] identifier[cursor]
keyword[try] :
identifier[con] . identifier[close] ()
keyword[except] identifier[Exception] :
keyword[pass]
keyword[if] identifier[transaction] :
identifier[self] . identifier[_transaction] = keyword[False]
keyword[raise] identifier[error]
keyword[return] identifier[cursor]
|
def _cursor(self, *args, **kwargs):
"""A "tough" version of the method cursor()."""
# The args and kwargs are not part of the standard,
# but some database modules seem to use these.
transaction = self._transaction
if not transaction:
self._ping_check(2) # depends on [control=['if'], data=[]]
try:
if self._maxusage:
if self._usage >= self._maxusage:
# the connection was used too often
raise self._failure # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
cursor = self._con.cursor(*args, **kwargs) # try to get a cursor # depends on [control=['try'], data=[]]
except self._failures as error: # error in getting cursor
try: # try to reopen the connection
con = self._create() # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]]
else:
try: # and try one more time to get a cursor
cursor = con.cursor(*args, **kwargs) # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]]
else:
self._close()
self._store(con)
if transaction:
raise error # re-raise the original error again # depends on [control=['if'], data=[]]
return cursor
try:
con.close() # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]]
if transaction:
self._transaction = False # depends on [control=['if'], data=[]]
raise error # re-raise the original error again # depends on [control=['except'], data=['error']]
return cursor
|
def save(self, name, content, *args, **kwargs):
"""
Save the file, also saving a reference to the thumbnail cache Source
model.
"""
super(ThumbnailerFieldFile, self).save(name, content, *args, **kwargs)
self.get_source_cache(create=True, update=True)
|
def function[save, parameter[self, name, content]]:
constant[
Save the file, also saving a reference to the thumbnail cache Source
model.
]
call[call[name[super], parameter[name[ThumbnailerFieldFile], name[self]]].save, parameter[name[name], name[content], <ast.Starred object at 0x7da20c9931f0>]]
call[name[self].get_source_cache, parameter[]]
|
keyword[def] identifier[save] ( identifier[self] , identifier[name] , identifier[content] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[super] ( identifier[ThumbnailerFieldFile] , identifier[self] ). identifier[save] ( identifier[name] , identifier[content] ,* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[get_source_cache] ( identifier[create] = keyword[True] , identifier[update] = keyword[True] )
|
def save(self, name, content, *args, **kwargs):
"""
Save the file, also saving a reference to the thumbnail cache Source
model.
"""
super(ThumbnailerFieldFile, self).save(name, content, *args, **kwargs)
self.get_source_cache(create=True, update=True)
|
def get_sdb_id(self, sdb):
""" Return the ID for the given safe deposit box.
Keyword arguments:
sdb -- This is the name of the safe deposit box, not the path"""
json_resp = self.get_sdbs()
for r in json_resp:
if r['name'] == sdb:
return str(r['id'])
# If we haven't returned yet then we didn't find what we were
# looking for.
raise CerberusClientException("'%s' not found" % sdb)
|
def function[get_sdb_id, parameter[self, sdb]]:
constant[ Return the ID for the given safe deposit box.
Keyword arguments:
sdb -- This is the name of the safe deposit box, not the path]
variable[json_resp] assign[=] call[name[self].get_sdbs, parameter[]]
for taget[name[r]] in starred[name[json_resp]] begin[:]
if compare[call[name[r]][constant[name]] equal[==] name[sdb]] begin[:]
return[call[name[str], parameter[call[name[r]][constant[id]]]]]
<ast.Raise object at 0x7da1b04a5960>
|
keyword[def] identifier[get_sdb_id] ( identifier[self] , identifier[sdb] ):
literal[string]
identifier[json_resp] = identifier[self] . identifier[get_sdbs] ()
keyword[for] identifier[r] keyword[in] identifier[json_resp] :
keyword[if] identifier[r] [ literal[string] ]== identifier[sdb] :
keyword[return] identifier[str] ( identifier[r] [ literal[string] ])
keyword[raise] identifier[CerberusClientException] ( literal[string] % identifier[sdb] )
|
def get_sdb_id(self, sdb):
""" Return the ID for the given safe deposit box.
Keyword arguments:
sdb -- This is the name of the safe deposit box, not the path"""
json_resp = self.get_sdbs()
for r in json_resp:
if r['name'] == sdb:
return str(r['id']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']]
# If we haven't returned yet then we didn't find what we were
# looking for.
raise CerberusClientException("'%s' not found" % sdb)
|
def hexdigest(self, data=None):
"""
Returns digest in the hexadecimal form. For compatibility
with hashlib
"""
from base64 import b16encode
if pyver == 2:
return b16encode(self.digest(data))
else:
return b16encode(self.digest(data)).decode('us-ascii')
|
def function[hexdigest, parameter[self, data]]:
constant[
Returns digest in the hexadecimal form. For compatibility
with hashlib
]
from relative_module[base64] import module[b16encode]
if compare[name[pyver] equal[==] constant[2]] begin[:]
return[call[name[b16encode], parameter[call[name[self].digest, parameter[name[data]]]]]]
|
keyword[def] identifier[hexdigest] ( identifier[self] , identifier[data] = keyword[None] ):
literal[string]
keyword[from] identifier[base64] keyword[import] identifier[b16encode]
keyword[if] identifier[pyver] == literal[int] :
keyword[return] identifier[b16encode] ( identifier[self] . identifier[digest] ( identifier[data] ))
keyword[else] :
keyword[return] identifier[b16encode] ( identifier[self] . identifier[digest] ( identifier[data] )). identifier[decode] ( literal[string] )
|
def hexdigest(self, data=None):
"""
Returns digest in the hexadecimal form. For compatibility
with hashlib
"""
from base64 import b16encode
if pyver == 2:
return b16encode(self.digest(data)) # depends on [control=['if'], data=[]]
else:
return b16encode(self.digest(data)).decode('us-ascii')
|
def pretty_print(self, objlist):
'''Pretty print the result of s3walk. Here we calculate the maximum width
of each column and align them.
'''
def normalize_time(timestamp):
'''Normalize the timestamp format for pretty print.'''
if timestamp is None:
return ' ' * 16
return TIMESTAMP_FORMAT % (timestamp.year, timestamp.month, timestamp.day, timestamp.hour, timestamp.minute)
cwidth = [0, 0, 0]
format = '%%%ds %%%ds %%-%ds'
# Calculate maximum width for each column.
result = []
for obj in objlist:
last_modified = normalize_time(obj['last_modified'])
size = str(obj['size']) if not obj['is_dir'] else 'DIR'
name = obj['name']
item = (last_modified, size, name)
for i, value in enumerate(item):
if cwidth[i] < len(value):
cwidth[i] = len(value)
result.append(item)
# Format output.
for item in result:
text = (format % tuple(cwidth)) % item
message('%s', text.rstrip())
|
def function[pretty_print, parameter[self, objlist]]:
constant[Pretty print the result of s3walk. Here we calculate the maximum width
of each column and align them.
]
def function[normalize_time, parameter[timestamp]]:
constant[Normalize the timestamp format for pretty print.]
if compare[name[timestamp] is constant[None]] begin[:]
return[binary_operation[constant[ ] * constant[16]]]
return[binary_operation[name[TIMESTAMP_FORMAT] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0247bb0>, <ast.Attribute object at 0x7da1b0247c10>, <ast.Attribute object at 0x7da1b0247c40>, <ast.Attribute object at 0x7da1b0247a90>, <ast.Attribute object at 0x7da1b0247a60>]]]]
variable[cwidth] assign[=] list[[<ast.Constant object at 0x7da1b0245d80>, <ast.Constant object at 0x7da1b0245db0>, <ast.Constant object at 0x7da1b0245de0>]]
variable[format] assign[=] constant[%%%ds %%%ds %%-%ds]
variable[result] assign[=] list[[]]
for taget[name[obj]] in starred[name[objlist]] begin[:]
variable[last_modified] assign[=] call[name[normalize_time], parameter[call[name[obj]][constant[last_modified]]]]
variable[size] assign[=] <ast.IfExp object at 0x7da1b02f3d60>
variable[name] assign[=] call[name[obj]][constant[name]]
variable[item] assign[=] tuple[[<ast.Name object at 0x7da1b02f2d40>, <ast.Name object at 0x7da1b02f3430>, <ast.Name object at 0x7da1b02f1600>]]
for taget[tuple[[<ast.Name object at 0x7da1b02f2a70>, <ast.Name object at 0x7da1b02f3550>]]] in starred[call[name[enumerate], parameter[name[item]]]] begin[:]
if compare[call[name[cwidth]][name[i]] less[<] call[name[len], parameter[name[value]]]] begin[:]
call[name[cwidth]][name[i]] assign[=] call[name[len], parameter[name[value]]]
call[name[result].append, parameter[name[item]]]
for taget[name[item]] in starred[name[result]] begin[:]
variable[text] assign[=] binary_operation[binary_operation[name[format] <ast.Mod object at 0x7da2590d6920> call[name[tuple], parameter[name[cwidth]]]] <ast.Mod object at 0x7da2590d6920> name[item]]
call[name[message], parameter[constant[%s], call[name[text].rstrip, parameter[]]]]
|
keyword[def] identifier[pretty_print] ( identifier[self] , identifier[objlist] ):
literal[string]
keyword[def] identifier[normalize_time] ( identifier[timestamp] ):
literal[string]
keyword[if] identifier[timestamp] keyword[is] keyword[None] :
keyword[return] literal[string] * literal[int]
keyword[return] identifier[TIMESTAMP_FORMAT] %( identifier[timestamp] . identifier[year] , identifier[timestamp] . identifier[month] , identifier[timestamp] . identifier[day] , identifier[timestamp] . identifier[hour] , identifier[timestamp] . identifier[minute] )
identifier[cwidth] =[ literal[int] , literal[int] , literal[int] ]
identifier[format] = literal[string]
identifier[result] =[]
keyword[for] identifier[obj] keyword[in] identifier[objlist] :
identifier[last_modified] = identifier[normalize_time] ( identifier[obj] [ literal[string] ])
identifier[size] = identifier[str] ( identifier[obj] [ literal[string] ]) keyword[if] keyword[not] identifier[obj] [ literal[string] ] keyword[else] literal[string]
identifier[name] = identifier[obj] [ literal[string] ]
identifier[item] =( identifier[last_modified] , identifier[size] , identifier[name] )
keyword[for] identifier[i] , identifier[value] keyword[in] identifier[enumerate] ( identifier[item] ):
keyword[if] identifier[cwidth] [ identifier[i] ]< identifier[len] ( identifier[value] ):
identifier[cwidth] [ identifier[i] ]= identifier[len] ( identifier[value] )
identifier[result] . identifier[append] ( identifier[item] )
keyword[for] identifier[item] keyword[in] identifier[result] :
identifier[text] =( identifier[format] % identifier[tuple] ( identifier[cwidth] ))% identifier[item]
identifier[message] ( literal[string] , identifier[text] . identifier[rstrip] ())
|
def pretty_print(self, objlist):
"""Pretty print the result of s3walk. Here we calculate the maximum width
of each column and align them.
"""
def normalize_time(timestamp):
"""Normalize the timestamp format for pretty print."""
if timestamp is None:
return ' ' * 16 # depends on [control=['if'], data=[]]
return TIMESTAMP_FORMAT % (timestamp.year, timestamp.month, timestamp.day, timestamp.hour, timestamp.minute)
cwidth = [0, 0, 0]
format = '%%%ds %%%ds %%-%ds'
# Calculate maximum width for each column.
result = []
for obj in objlist:
last_modified = normalize_time(obj['last_modified'])
size = str(obj['size']) if not obj['is_dir'] else 'DIR'
name = obj['name']
item = (last_modified, size, name)
for (i, value) in enumerate(item):
if cwidth[i] < len(value):
cwidth[i] = len(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
result.append(item) # depends on [control=['for'], data=['obj']]
# Format output.
for item in result:
text = format % tuple(cwidth) % item
message('%s', text.rstrip()) # depends on [control=['for'], data=['item']]
|
def do_reset(self, line):
"""reset Set all session variables to their default values."""
self._split_args(line, 0, 0)
self._command_processor.get_session().reset()
self._print_info_if_verbose("Successfully reset session variables")
|
def function[do_reset, parameter[self, line]]:
constant[reset Set all session variables to their default values.]
call[name[self]._split_args, parameter[name[line], constant[0], constant[0]]]
call[call[name[self]._command_processor.get_session, parameter[]].reset, parameter[]]
call[name[self]._print_info_if_verbose, parameter[constant[Successfully reset session variables]]]
|
keyword[def] identifier[do_reset] ( identifier[self] , identifier[line] ):
literal[string]
identifier[self] . identifier[_split_args] ( identifier[line] , literal[int] , literal[int] )
identifier[self] . identifier[_command_processor] . identifier[get_session] (). identifier[reset] ()
identifier[self] . identifier[_print_info_if_verbose] ( literal[string] )
|
def do_reset(self, line):
"""reset Set all session variables to their default values."""
self._split_args(line, 0, 0)
self._command_processor.get_session().reset()
self._print_info_if_verbose('Successfully reset session variables')
|
def prompt_choices(name, choices, default=None, no_choice=('none',)):
"""
Grabs user input from command line from set of provided choices.
:param name: prompt text
:param choices: list or tuple of available choices.
:param default: default value if no input provided.
:param no_choice: acceptable list of strings for "null choice"
"""
_choices = []
options = []
for choice in choices:
options.append(choice)
_choices.append(choice)
while True:
rv = prompt(name + '? - (%s)' % ', '.join(options), default)
rv = rv.lower()
if rv in no_choice:
return None
if rv in _choices:
return rv
|
def function[prompt_choices, parameter[name, choices, default, no_choice]]:
constant[
Grabs user input from command line from set of provided choices.
:param name: prompt text
:param choices: list or tuple of available choices.
:param default: default value if no input provided.
:param no_choice: acceptable list of strings for "null choice"
]
variable[_choices] assign[=] list[[]]
variable[options] assign[=] list[[]]
for taget[name[choice]] in starred[name[choices]] begin[:]
call[name[options].append, parameter[name[choice]]]
call[name[_choices].append, parameter[name[choice]]]
while constant[True] begin[:]
variable[rv] assign[=] call[name[prompt], parameter[binary_operation[name[name] + binary_operation[constant[? - (%s)] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[options]]]]], name[default]]]
variable[rv] assign[=] call[name[rv].lower, parameter[]]
if compare[name[rv] in name[no_choice]] begin[:]
return[constant[None]]
if compare[name[rv] in name[_choices]] begin[:]
return[name[rv]]
|
keyword[def] identifier[prompt_choices] ( identifier[name] , identifier[choices] , identifier[default] = keyword[None] , identifier[no_choice] =( literal[string] ,)):
literal[string]
identifier[_choices] =[]
identifier[options] =[]
keyword[for] identifier[choice] keyword[in] identifier[choices] :
identifier[options] . identifier[append] ( identifier[choice] )
identifier[_choices] . identifier[append] ( identifier[choice] )
keyword[while] keyword[True] :
identifier[rv] = identifier[prompt] ( identifier[name] + literal[string] % literal[string] . identifier[join] ( identifier[options] ), identifier[default] )
identifier[rv] = identifier[rv] . identifier[lower] ()
keyword[if] identifier[rv] keyword[in] identifier[no_choice] :
keyword[return] keyword[None]
keyword[if] identifier[rv] keyword[in] identifier[_choices] :
keyword[return] identifier[rv]
|
def prompt_choices(name, choices, default=None, no_choice=('none',)):
"""
Grabs user input from command line from set of provided choices.
:param name: prompt text
:param choices: list or tuple of available choices.
:param default: default value if no input provided.
:param no_choice: acceptable list of strings for "null choice"
"""
_choices = []
options = []
for choice in choices:
options.append(choice)
_choices.append(choice) # depends on [control=['for'], data=['choice']]
while True:
rv = prompt(name + '? - (%s)' % ', '.join(options), default)
rv = rv.lower()
if rv in no_choice:
return None # depends on [control=['if'], data=[]]
if rv in _choices:
return rv # depends on [control=['if'], data=['rv']] # depends on [control=['while'], data=[]]
|
def cleanup(self, sched, coro):
"""Remove this coro from the waiting for signal queue."""
try:
sched.sigwait[self.name].remove((self, coro))
except ValueError:
pass
return True
|
def function[cleanup, parameter[self, sched, coro]]:
constant[Remove this coro from the waiting for signal queue.]
<ast.Try object at 0x7da204623580>
return[constant[True]]
|
keyword[def] identifier[cleanup] ( identifier[self] , identifier[sched] , identifier[coro] ):
literal[string]
keyword[try] :
identifier[sched] . identifier[sigwait] [ identifier[self] . identifier[name] ]. identifier[remove] (( identifier[self] , identifier[coro] ))
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[return] keyword[True]
|
def cleanup(self, sched, coro):
"""Remove this coro from the waiting for signal queue."""
try:
sched.sigwait[self.name].remove((self, coro)) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]]
return True
|
def generate_pymol_session(self, pymol_executable = 'pymol', settings = {}):
''' Generates the PyMOL session for the scaffold, model, and design structures.
Returns this session and the script which generated it.'''
b = BatchBuilder(pymol_executable = pymol_executable)
if self.scaffold_pdb:
structures_list = [
('Scaffold', self.scaffold_pdb.pdb_content, self.get_differing_scaffold_residue_ids()),
('Model', self.model_pdb.pdb_content, self.get_differing_model_residue_ids()),
('ExpStructure', self.design_pdb.pdb_content, self.get_differing_design_residue_ids ()),
]
else:
structures_list = [
('Model', self.model_pdb.pdb_content, self.get_differing_model_residue_ids()),
('ExpStructure', self.design_pdb.pdb_content, self.get_differing_design_residue_ids ()),
]
PSE_files = b.run(ScaffoldModelDesignBuilder, [PDBContainer.from_content_triple(structures_list)], settings = settings)
return PSE_files[0], b.PSE_scripts[0]
|
def function[generate_pymol_session, parameter[self, pymol_executable, settings]]:
constant[ Generates the PyMOL session for the scaffold, model, and design structures.
Returns this session and the script which generated it.]
variable[b] assign[=] call[name[BatchBuilder], parameter[]]
if name[self].scaffold_pdb begin[:]
variable[structures_list] assign[=] list[[<ast.Tuple object at 0x7da18bccac20>, <ast.Tuple object at 0x7da18bccae00>, <ast.Tuple object at 0x7da18bccbf70>]]
variable[PSE_files] assign[=] call[name[b].run, parameter[name[ScaffoldModelDesignBuilder], list[[<ast.Call object at 0x7da1b23d66b0>]]]]
return[tuple[[<ast.Subscript object at 0x7da1b23d5060>, <ast.Subscript object at 0x7da1b23d79d0>]]]
|
keyword[def] identifier[generate_pymol_session] ( identifier[self] , identifier[pymol_executable] = literal[string] , identifier[settings] ={}):
literal[string]
identifier[b] = identifier[BatchBuilder] ( identifier[pymol_executable] = identifier[pymol_executable] )
keyword[if] identifier[self] . identifier[scaffold_pdb] :
identifier[structures_list] =[
( literal[string] , identifier[self] . identifier[scaffold_pdb] . identifier[pdb_content] , identifier[self] . identifier[get_differing_scaffold_residue_ids] ()),
( literal[string] , identifier[self] . identifier[model_pdb] . identifier[pdb_content] , identifier[self] . identifier[get_differing_model_residue_ids] ()),
( literal[string] , identifier[self] . identifier[design_pdb] . identifier[pdb_content] , identifier[self] . identifier[get_differing_design_residue_ids] ()),
]
keyword[else] :
identifier[structures_list] =[
( literal[string] , identifier[self] . identifier[model_pdb] . identifier[pdb_content] , identifier[self] . identifier[get_differing_model_residue_ids] ()),
( literal[string] , identifier[self] . identifier[design_pdb] . identifier[pdb_content] , identifier[self] . identifier[get_differing_design_residue_ids] ()),
]
identifier[PSE_files] = identifier[b] . identifier[run] ( identifier[ScaffoldModelDesignBuilder] ,[ identifier[PDBContainer] . identifier[from_content_triple] ( identifier[structures_list] )], identifier[settings] = identifier[settings] )
keyword[return] identifier[PSE_files] [ literal[int] ], identifier[b] . identifier[PSE_scripts] [ literal[int] ]
|
def generate_pymol_session(self, pymol_executable='pymol', settings={}):
""" Generates the PyMOL session for the scaffold, model, and design structures.
Returns this session and the script which generated it."""
b = BatchBuilder(pymol_executable=pymol_executable)
if self.scaffold_pdb:
structures_list = [('Scaffold', self.scaffold_pdb.pdb_content, self.get_differing_scaffold_residue_ids()), ('Model', self.model_pdb.pdb_content, self.get_differing_model_residue_ids()), ('ExpStructure', self.design_pdb.pdb_content, self.get_differing_design_residue_ids())] # depends on [control=['if'], data=[]]
else:
structures_list = [('Model', self.model_pdb.pdb_content, self.get_differing_model_residue_ids()), ('ExpStructure', self.design_pdb.pdb_content, self.get_differing_design_residue_ids())]
PSE_files = b.run(ScaffoldModelDesignBuilder, [PDBContainer.from_content_triple(structures_list)], settings=settings)
return (PSE_files[0], b.PSE_scripts[0])
|
def _trigger_event(self, event, *args, **kwargs):
"""Invoke an event handler."""
run_async = kwargs.pop('run_async', False)
if event in self.handlers:
if run_async:
return self.start_background_task(self.handlers[event], *args)
else:
try:
return self.handlers[event](*args)
except:
self.logger.exception(event + ' handler error')
|
def function[_trigger_event, parameter[self, event]]:
constant[Invoke an event handler.]
variable[run_async] assign[=] call[name[kwargs].pop, parameter[constant[run_async], constant[False]]]
if compare[name[event] in name[self].handlers] begin[:]
if name[run_async] begin[:]
return[call[name[self].start_background_task, parameter[call[name[self].handlers][name[event]], <ast.Starred object at 0x7da1b08a3b50>]]]
|
keyword[def] identifier[_trigger_event] ( identifier[self] , identifier[event] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[run_async] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
keyword[if] identifier[event] keyword[in] identifier[self] . identifier[handlers] :
keyword[if] identifier[run_async] :
keyword[return] identifier[self] . identifier[start_background_task] ( identifier[self] . identifier[handlers] [ identifier[event] ],* identifier[args] )
keyword[else] :
keyword[try] :
keyword[return] identifier[self] . identifier[handlers] [ identifier[event] ](* identifier[args] )
keyword[except] :
identifier[self] . identifier[logger] . identifier[exception] ( identifier[event] + literal[string] )
|
def _trigger_event(self, event, *args, **kwargs):
"""Invoke an event handler."""
run_async = kwargs.pop('run_async', False)
if event in self.handlers:
if run_async:
return self.start_background_task(self.handlers[event], *args) # depends on [control=['if'], data=[]]
else:
try:
return self.handlers[event](*args) # depends on [control=['try'], data=[]]
except:
self.logger.exception(event + ' handler error') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['event']]
|
def dump(self):
"""Dump parsed environment variables to a dictionary of simple data types (numbers
and strings).
"""
schema = _dict2schema(self._fields)()
dump_result = schema.dump(self._values)
return dump_result.data if MARSHMALLOW_VERSION_INFO[0] < 3 else dump_result
|
def function[dump, parameter[self]]:
constant[Dump parsed environment variables to a dictionary of simple data types (numbers
and strings).
]
variable[schema] assign[=] call[call[name[_dict2schema], parameter[name[self]._fields]], parameter[]]
variable[dump_result] assign[=] call[name[schema].dump, parameter[name[self]._values]]
return[<ast.IfExp object at 0x7da1b1e96f80>]
|
keyword[def] identifier[dump] ( identifier[self] ):
literal[string]
identifier[schema] = identifier[_dict2schema] ( identifier[self] . identifier[_fields] )()
identifier[dump_result] = identifier[schema] . identifier[dump] ( identifier[self] . identifier[_values] )
keyword[return] identifier[dump_result] . identifier[data] keyword[if] identifier[MARSHMALLOW_VERSION_INFO] [ literal[int] ]< literal[int] keyword[else] identifier[dump_result]
|
def dump(self):
"""Dump parsed environment variables to a dictionary of simple data types (numbers
and strings).
"""
schema = _dict2schema(self._fields)()
dump_result = schema.dump(self._values)
return dump_result.data if MARSHMALLOW_VERSION_INFO[0] < 3 else dump_result
|
def as_dict(self):
"""
Return the contents as dictionary, for client-side export.
The dictionary contains the fields:
* ``slot``
* ``title``
* ``role``
* ``fallback_language``
* ``allowed_plugins``
"""
plugins = self.get_allowed_plugins()
return {
'slot': self.slot,
'title': self.title,
'role': self.role,
'fallback_language': self.fallback_language,
'allowed_plugins': [plugin.name for plugin in plugins],
}
|
def function[as_dict, parameter[self]]:
constant[
Return the contents as dictionary, for client-side export.
The dictionary contains the fields:
* ``slot``
* ``title``
* ``role``
* ``fallback_language``
* ``allowed_plugins``
]
variable[plugins] assign[=] call[name[self].get_allowed_plugins, parameter[]]
return[dictionary[[<ast.Constant object at 0x7da1b10e4e20>, <ast.Constant object at 0x7da1b10e6b30>, <ast.Constant object at 0x7da1b10e7ca0>, <ast.Constant object at 0x7da1b10e7970>, <ast.Constant object at 0x7da1b10e7df0>], [<ast.Attribute object at 0x7da1b10e4d60>, <ast.Attribute object at 0x7da1b10e7580>, <ast.Attribute object at 0x7da1b10e6a70>, <ast.Attribute object at 0x7da1b10e5090>, <ast.ListComp object at 0x7da1b10e5f60>]]]
|
keyword[def] identifier[as_dict] ( identifier[self] ):
literal[string]
identifier[plugins] = identifier[self] . identifier[get_allowed_plugins] ()
keyword[return] {
literal[string] : identifier[self] . identifier[slot] ,
literal[string] : identifier[self] . identifier[title] ,
literal[string] : identifier[self] . identifier[role] ,
literal[string] : identifier[self] . identifier[fallback_language] ,
literal[string] :[ identifier[plugin] . identifier[name] keyword[for] identifier[plugin] keyword[in] identifier[plugins] ],
}
|
def as_dict(self):
"""
Return the contents as dictionary, for client-side export.
The dictionary contains the fields:
* ``slot``
* ``title``
* ``role``
* ``fallback_language``
* ``allowed_plugins``
"""
plugins = self.get_allowed_plugins()
return {'slot': self.slot, 'title': self.title, 'role': self.role, 'fallback_language': self.fallback_language, 'allowed_plugins': [plugin.name for plugin in plugins]}
|
def create_token(
self,
registry_address,
initial_alloc=10 ** 6,
name='raidentester',
symbol='RDT',
decimals=2,
timeout=60,
auto_register=True,
):
""" Create a proxy for a new HumanStandardToken (ERC20), that is
initialized with Args(below).
Per default it will be registered with 'raiden'.
Args:
initial_alloc (int): amount of initial tokens.
name (str): human readable token name.
symbol (str): token shorthand symbol.
decimals (int): decimal places.
timeout (int): timeout in seconds for creation.
auto_register (boolean): if True(default), automatically register
the token with raiden.
Returns:
token_address_hex: the hex encoded address of the new token/token.
"""
with gevent.Timeout(timeout):
token_address = deploy_contract_web3(
CONTRACT_HUMAN_STANDARD_TOKEN,
self._chain.client,
contract_manager=self._raiden.contract_manager,
constructor_arguments=(initial_alloc, name, decimals, symbol),
)
token_address_hex = to_checksum_address(token_address)
if auto_register:
self.register_token(registry_address, token_address_hex)
print("Successfully created {}the token '{}'.".format(
'and registered ' if auto_register else ' ',
name,
))
return token_address_hex
|
def function[create_token, parameter[self, registry_address, initial_alloc, name, symbol, decimals, timeout, auto_register]]:
constant[ Create a proxy for a new HumanStandardToken (ERC20), that is
initialized with Args(below).
Per default it will be registered with 'raiden'.
Args:
initial_alloc (int): amount of initial tokens.
name (str): human readable token name.
symbol (str): token shorthand symbol.
decimals (int): decimal places.
timeout (int): timeout in seconds for creation.
auto_register (boolean): if True(default), automatically register
the token with raiden.
Returns:
token_address_hex: the hex encoded address of the new token/token.
]
with call[name[gevent].Timeout, parameter[name[timeout]]] begin[:]
variable[token_address] assign[=] call[name[deploy_contract_web3], parameter[name[CONTRACT_HUMAN_STANDARD_TOKEN], name[self]._chain.client]]
variable[token_address_hex] assign[=] call[name[to_checksum_address], parameter[name[token_address]]]
if name[auto_register] begin[:]
call[name[self].register_token, parameter[name[registry_address], name[token_address_hex]]]
call[name[print], parameter[call[constant[Successfully created {}the token '{}'.].format, parameter[<ast.IfExp object at 0x7da1b170a0e0>, name[name]]]]]
return[name[token_address_hex]]
|
keyword[def] identifier[create_token] (
identifier[self] ,
identifier[registry_address] ,
identifier[initial_alloc] = literal[int] ** literal[int] ,
identifier[name] = literal[string] ,
identifier[symbol] = literal[string] ,
identifier[decimals] = literal[int] ,
identifier[timeout] = literal[int] ,
identifier[auto_register] = keyword[True] ,
):
literal[string]
keyword[with] identifier[gevent] . identifier[Timeout] ( identifier[timeout] ):
identifier[token_address] = identifier[deploy_contract_web3] (
identifier[CONTRACT_HUMAN_STANDARD_TOKEN] ,
identifier[self] . identifier[_chain] . identifier[client] ,
identifier[contract_manager] = identifier[self] . identifier[_raiden] . identifier[contract_manager] ,
identifier[constructor_arguments] =( identifier[initial_alloc] , identifier[name] , identifier[decimals] , identifier[symbol] ),
)
identifier[token_address_hex] = identifier[to_checksum_address] ( identifier[token_address] )
keyword[if] identifier[auto_register] :
identifier[self] . identifier[register_token] ( identifier[registry_address] , identifier[token_address_hex] )
identifier[print] ( literal[string] . identifier[format] (
literal[string] keyword[if] identifier[auto_register] keyword[else] literal[string] ,
identifier[name] ,
))
keyword[return] identifier[token_address_hex]
|
def create_token(self, registry_address, initial_alloc=10 ** 6, name='raidentester', symbol='RDT', decimals=2, timeout=60, auto_register=True):
""" Create a proxy for a new HumanStandardToken (ERC20), that is
initialized with Args(below).
Per default it will be registered with 'raiden'.
Args:
initial_alloc (int): amount of initial tokens.
name (str): human readable token name.
symbol (str): token shorthand symbol.
decimals (int): decimal places.
timeout (int): timeout in seconds for creation.
auto_register (boolean): if True(default), automatically register
the token with raiden.
Returns:
token_address_hex: the hex encoded address of the new token/token.
"""
with gevent.Timeout(timeout):
token_address = deploy_contract_web3(CONTRACT_HUMAN_STANDARD_TOKEN, self._chain.client, contract_manager=self._raiden.contract_manager, constructor_arguments=(initial_alloc, name, decimals, symbol)) # depends on [control=['with'], data=[]]
token_address_hex = to_checksum_address(token_address)
if auto_register:
self.register_token(registry_address, token_address_hex) # depends on [control=['if'], data=[]]
print("Successfully created {}the token '{}'.".format('and registered ' if auto_register else ' ', name))
return token_address_hex
|
def validate_binary_sign(signed_query, signature, cert=None, algorithm=OneLogin_Saml2_Constants.RSA_SHA1, debug=False):
"""
Validates signed binary data (Used to validate GET Signature).
:param signed_query: The element we should validate
:type: string
:param signature: The signature that will be validate
:type: string
:param cert: The public cert
:type: string
:param algorithm: Signature algorithm
:type: string
:param debug: Activate the xmlsec debug
:type: bool
:param raise_exceptions: Whether to return false on failure or raise an exception
:type raise_exceptions: Boolean
"""
error_callback_method = None
if debug:
error_callback_method = print_xmlsec_errors
xmlsec.set_error_callback(error_callback_method)
dsig_ctx = xmlsec.DSigCtx()
file_cert = OneLogin_Saml2_Utils.write_temp_file(cert)
dsig_ctx.signKey = xmlsec.Key.load(file_cert.name, xmlsec.KeyDataFormatCertPem, None)
file_cert.close()
# Sign the metadata with our private key.
sign_algorithm_transform_map = {
OneLogin_Saml2_Constants.DSA_SHA1: xmlsec.TransformDsaSha1,
OneLogin_Saml2_Constants.RSA_SHA1: xmlsec.TransformRsaSha1,
OneLogin_Saml2_Constants.RSA_SHA256: xmlsec.TransformRsaSha256,
OneLogin_Saml2_Constants.RSA_SHA384: xmlsec.TransformRsaSha384,
OneLogin_Saml2_Constants.RSA_SHA512: xmlsec.TransformRsaSha512
}
sign_algorithm_transform = sign_algorithm_transform_map.get(algorithm, xmlsec.TransformRsaSha1)
dsig_ctx.verifyBinary(signed_query, sign_algorithm_transform, signature)
return True
|
def function[validate_binary_sign, parameter[signed_query, signature, cert, algorithm, debug]]:
constant[
Validates signed binary data (Used to validate GET Signature).
:param signed_query: The element we should validate
:type: string
:param signature: The signature that will be validate
:type: string
:param cert: The public cert
:type: string
:param algorithm: Signature algorithm
:type: string
:param debug: Activate the xmlsec debug
:type: bool
:param raise_exceptions: Whether to return false on failure or raise an exception
:type raise_exceptions: Boolean
]
variable[error_callback_method] assign[=] constant[None]
if name[debug] begin[:]
variable[error_callback_method] assign[=] name[print_xmlsec_errors]
call[name[xmlsec].set_error_callback, parameter[name[error_callback_method]]]
variable[dsig_ctx] assign[=] call[name[xmlsec].DSigCtx, parameter[]]
variable[file_cert] assign[=] call[name[OneLogin_Saml2_Utils].write_temp_file, parameter[name[cert]]]
name[dsig_ctx].signKey assign[=] call[name[xmlsec].Key.load, parameter[name[file_cert].name, name[xmlsec].KeyDataFormatCertPem, constant[None]]]
call[name[file_cert].close, parameter[]]
variable[sign_algorithm_transform_map] assign[=] dictionary[[<ast.Attribute object at 0x7da1b18e7ee0>, <ast.Attribute object at 0x7da1b18e5b70>, <ast.Attribute object at 0x7da1b18e56c0>, <ast.Attribute object at 0x7da1b18e5960>, <ast.Attribute object at 0x7da1b18e4bb0>], [<ast.Attribute object at 0x7da1b18e4a60>, <ast.Attribute object at 0x7da1b18e7010>, <ast.Attribute object at 0x7da1b18e7490>, <ast.Attribute object at 0x7da1b18e4e20>, <ast.Attribute object at 0x7da1b18e6650>]]
variable[sign_algorithm_transform] assign[=] call[name[sign_algorithm_transform_map].get, parameter[name[algorithm], name[xmlsec].TransformRsaSha1]]
call[name[dsig_ctx].verifyBinary, parameter[name[signed_query], name[sign_algorithm_transform], name[signature]]]
return[constant[True]]
|
keyword[def] identifier[validate_binary_sign] ( identifier[signed_query] , identifier[signature] , identifier[cert] = keyword[None] , identifier[algorithm] = identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA1] , identifier[debug] = keyword[False] ):
literal[string]
identifier[error_callback_method] = keyword[None]
keyword[if] identifier[debug] :
identifier[error_callback_method] = identifier[print_xmlsec_errors]
identifier[xmlsec] . identifier[set_error_callback] ( identifier[error_callback_method] )
identifier[dsig_ctx] = identifier[xmlsec] . identifier[DSigCtx] ()
identifier[file_cert] = identifier[OneLogin_Saml2_Utils] . identifier[write_temp_file] ( identifier[cert] )
identifier[dsig_ctx] . identifier[signKey] = identifier[xmlsec] . identifier[Key] . identifier[load] ( identifier[file_cert] . identifier[name] , identifier[xmlsec] . identifier[KeyDataFormatCertPem] , keyword[None] )
identifier[file_cert] . identifier[close] ()
identifier[sign_algorithm_transform_map] ={
identifier[OneLogin_Saml2_Constants] . identifier[DSA_SHA1] : identifier[xmlsec] . identifier[TransformDsaSha1] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA1] : identifier[xmlsec] . identifier[TransformRsaSha1] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA256] : identifier[xmlsec] . identifier[TransformRsaSha256] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA384] : identifier[xmlsec] . identifier[TransformRsaSha384] ,
identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA512] : identifier[xmlsec] . identifier[TransformRsaSha512]
}
identifier[sign_algorithm_transform] = identifier[sign_algorithm_transform_map] . identifier[get] ( identifier[algorithm] , identifier[xmlsec] . identifier[TransformRsaSha1] )
identifier[dsig_ctx] . identifier[verifyBinary] ( identifier[signed_query] , identifier[sign_algorithm_transform] , identifier[signature] )
keyword[return] keyword[True]
|
def validate_binary_sign(signed_query, signature, cert=None, algorithm=OneLogin_Saml2_Constants.RSA_SHA1, debug=False):
"""
Validates signed binary data (Used to validate GET Signature).
:param signed_query: The element we should validate
:type: string
:param signature: The signature that will be validate
:type: string
:param cert: The public cert
:type: string
:param algorithm: Signature algorithm
:type: string
:param debug: Activate the xmlsec debug
:type: bool
:param raise_exceptions: Whether to return false on failure or raise an exception
:type raise_exceptions: Boolean
"""
error_callback_method = None
if debug:
error_callback_method = print_xmlsec_errors # depends on [control=['if'], data=[]]
xmlsec.set_error_callback(error_callback_method)
dsig_ctx = xmlsec.DSigCtx()
file_cert = OneLogin_Saml2_Utils.write_temp_file(cert)
dsig_ctx.signKey = xmlsec.Key.load(file_cert.name, xmlsec.KeyDataFormatCertPem, None)
file_cert.close()
# Sign the metadata with our private key.
sign_algorithm_transform_map = {OneLogin_Saml2_Constants.DSA_SHA1: xmlsec.TransformDsaSha1, OneLogin_Saml2_Constants.RSA_SHA1: xmlsec.TransformRsaSha1, OneLogin_Saml2_Constants.RSA_SHA256: xmlsec.TransformRsaSha256, OneLogin_Saml2_Constants.RSA_SHA384: xmlsec.TransformRsaSha384, OneLogin_Saml2_Constants.RSA_SHA512: xmlsec.TransformRsaSha512}
sign_algorithm_transform = sign_algorithm_transform_map.get(algorithm, xmlsec.TransformRsaSha1)
dsig_ctx.verifyBinary(signed_query, sign_algorithm_transform, signature)
return True
|
def get_offset(target):
"""
returns a location's time zone offset from UTC in minutes.
"""
from pytz import timezone
import pytz
from datetime import datetime
utc = pytz.utc
today = datetime.now()
tz_target = timezone(tf.certain_timezone_at(lat=target['lat'], lng=target['lng']))
# ATTENTION: tz_target could be None! handle error case
today_target = tz_target.localize(today)
today_utc = utc.localize(today)
return (today_utc - today_target).total_seconds() / 60
|
def function[get_offset, parameter[target]]:
constant[
returns a location's time zone offset from UTC in minutes.
]
from relative_module[pytz] import module[timezone]
import module[pytz]
from relative_module[datetime] import module[datetime]
variable[utc] assign[=] name[pytz].utc
variable[today] assign[=] call[name[datetime].now, parameter[]]
variable[tz_target] assign[=] call[name[timezone], parameter[call[name[tf].certain_timezone_at, parameter[]]]]
variable[today_target] assign[=] call[name[tz_target].localize, parameter[name[today]]]
variable[today_utc] assign[=] call[name[utc].localize, parameter[name[today]]]
return[binary_operation[call[binary_operation[name[today_utc] - name[today_target]].total_seconds, parameter[]] / constant[60]]]
|
keyword[def] identifier[get_offset] ( identifier[target] ):
literal[string]
keyword[from] identifier[pytz] keyword[import] identifier[timezone]
keyword[import] identifier[pytz]
keyword[from] identifier[datetime] keyword[import] identifier[datetime]
identifier[utc] = identifier[pytz] . identifier[utc]
identifier[today] = identifier[datetime] . identifier[now] ()
identifier[tz_target] = identifier[timezone] ( identifier[tf] . identifier[certain_timezone_at] ( identifier[lat] = identifier[target] [ literal[string] ], identifier[lng] = identifier[target] [ literal[string] ]))
identifier[today_target] = identifier[tz_target] . identifier[localize] ( identifier[today] )
identifier[today_utc] = identifier[utc] . identifier[localize] ( identifier[today] )
keyword[return] ( identifier[today_utc] - identifier[today_target] ). identifier[total_seconds] ()/ literal[int]
|
def get_offset(target):
"""
returns a location's time zone offset from UTC in minutes.
"""
from pytz import timezone
import pytz
from datetime import datetime
utc = pytz.utc
today = datetime.now()
tz_target = timezone(tf.certain_timezone_at(lat=target['lat'], lng=target['lng']))
# ATTENTION: tz_target could be None! handle error case
today_target = tz_target.localize(today)
today_utc = utc.localize(today)
return (today_utc - today_target).total_seconds() / 60
|
def _update(self, **kwargs):
"""
Update the mutable field `value`.
:rtype: bool
"""
if 'value' in kwargs and self.protocol_agent_values.get('value') != \
kwargs.get('value'):
self.protocol_agent_values.update(value=kwargs['value'])
return True
return False
|
def function[_update, parameter[self]]:
constant[
Update the mutable field `value`.
:rtype: bool
]
if <ast.BoolOp object at 0x7da1b1a2ceb0> begin[:]
call[name[self].protocol_agent_values.update, parameter[]]
return[constant[True]]
return[constant[False]]
|
keyword[def] identifier[_update] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] identifier[self] . identifier[protocol_agent_values] . identifier[get] ( literal[string] )!= identifier[kwargs] . identifier[get] ( literal[string] ):
identifier[self] . identifier[protocol_agent_values] . identifier[update] ( identifier[value] = identifier[kwargs] [ literal[string] ])
keyword[return] keyword[True]
keyword[return] keyword[False]
|
def _update(self, **kwargs):
"""
Update the mutable field `value`.
:rtype: bool
"""
if 'value' in kwargs and self.protocol_agent_values.get('value') != kwargs.get('value'):
self.protocol_agent_values.update(value=kwargs['value'])
return True # depends on [control=['if'], data=[]]
return False
|
def remove_item(self, item):
"""
Remove (and un-index) an object
:param item: object to remove
:type item: alignak.objects.item.Item
:return: None
"""
self.unindex_item(item)
self.items.pop(item.uuid, None)
|
def function[remove_item, parameter[self, item]]:
constant[
Remove (and un-index) an object
:param item: object to remove
:type item: alignak.objects.item.Item
:return: None
]
call[name[self].unindex_item, parameter[name[item]]]
call[name[self].items.pop, parameter[name[item].uuid, constant[None]]]
|
keyword[def] identifier[remove_item] ( identifier[self] , identifier[item] ):
literal[string]
identifier[self] . identifier[unindex_item] ( identifier[item] )
identifier[self] . identifier[items] . identifier[pop] ( identifier[item] . identifier[uuid] , keyword[None] )
|
def remove_item(self, item):
"""
Remove (and un-index) an object
:param item: object to remove
:type item: alignak.objects.item.Item
:return: None
"""
self.unindex_item(item)
self.items.pop(item.uuid, None)
|
def separate_string(string):
"""
>>> separate_string("test <2>")
(['test ', ''], ['2'])
"""
string_list = regex.split(r'<(?![!=])', regex.sub(r'>', '<', string))
return string_list[::2], string_list[1::2]
|
def function[separate_string, parameter[string]]:
constant[
>>> separate_string("test <2>")
(['test ', ''], ['2'])
]
variable[string_list] assign[=] call[name[regex].split, parameter[constant[<(?![!=])], call[name[regex].sub, parameter[constant[>], constant[<], name[string]]]]]
return[tuple[[<ast.Subscript object at 0x7da1b0e61f90>, <ast.Subscript object at 0x7da1b0f0d330>]]]
|
keyword[def] identifier[separate_string] ( identifier[string] ):
literal[string]
identifier[string_list] = identifier[regex] . identifier[split] ( literal[string] , identifier[regex] . identifier[sub] ( literal[string] , literal[string] , identifier[string] ))
keyword[return] identifier[string_list] [:: literal[int] ], identifier[string_list] [ literal[int] :: literal[int] ]
|
def separate_string(string):
"""
>>> separate_string("test <2>")
(['test ', ''], ['2'])
"""
string_list = regex.split('<(?![!=])', regex.sub('>', '<', string))
return (string_list[::2], string_list[1::2])
|
def randchoice(seq: Union[str, list, tuple, dict, set]) -> any:
"""Return a randomly chosen element from the given sequence.
Raises TypeError if *seq* is not str, list, tuple, dict, set and an
IndexError if it is empty.
>>> randchoice((1, 2, 'a', 'b')) #doctest:+SKIP
'a'
"""
if not isinstance(seq, (str, list, tuple, dict, set)):
raise TypeError('seq must be str, list, tuple, dict or set')
if len(seq) <= 0:
raise IndexError('seq must have at least one element')
if isinstance(seq, set):
values = list(seq)
return randchoice(values)
elif isinstance(seq, dict):
indexes = list(seq)
index = randchoice(indexes)
else:
index = randbelow(len(seq))
return seq[index]
|
def function[randchoice, parameter[seq]]:
constant[Return a randomly chosen element from the given sequence.
Raises TypeError if *seq* is not str, list, tuple, dict, set and an
IndexError if it is empty.
>>> randchoice((1, 2, 'a', 'b')) #doctest:+SKIP
'a'
]
if <ast.UnaryOp object at 0x7da18c4cd030> begin[:]
<ast.Raise object at 0x7da18c4cce50>
if compare[call[name[len], parameter[name[seq]]] less_or_equal[<=] constant[0]] begin[:]
<ast.Raise object at 0x7da18c4cc460>
if call[name[isinstance], parameter[name[seq], name[set]]] begin[:]
variable[values] assign[=] call[name[list], parameter[name[seq]]]
return[call[name[randchoice], parameter[name[values]]]]
return[call[name[seq]][name[index]]]
|
keyword[def] identifier[randchoice] ( identifier[seq] : identifier[Union] [ identifier[str] , identifier[list] , identifier[tuple] , identifier[dict] , identifier[set] ])-> identifier[any] :
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[seq] ,( identifier[str] , identifier[list] , identifier[tuple] , identifier[dict] , identifier[set] )):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[len] ( identifier[seq] )<= literal[int] :
keyword[raise] identifier[IndexError] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[seq] , identifier[set] ):
identifier[values] = identifier[list] ( identifier[seq] )
keyword[return] identifier[randchoice] ( identifier[values] )
keyword[elif] identifier[isinstance] ( identifier[seq] , identifier[dict] ):
identifier[indexes] = identifier[list] ( identifier[seq] )
identifier[index] = identifier[randchoice] ( identifier[indexes] )
keyword[else] :
identifier[index] = identifier[randbelow] ( identifier[len] ( identifier[seq] ))
keyword[return] identifier[seq] [ identifier[index] ]
|
def randchoice(seq: Union[str, list, tuple, dict, set]) -> any:
"""Return a randomly chosen element from the given sequence.
Raises TypeError if *seq* is not str, list, tuple, dict, set and an
IndexError if it is empty.
>>> randchoice((1, 2, 'a', 'b')) #doctest:+SKIP
'a'
"""
if not isinstance(seq, (str, list, tuple, dict, set)):
raise TypeError('seq must be str, list, tuple, dict or set') # depends on [control=['if'], data=[]]
if len(seq) <= 0:
raise IndexError('seq must have at least one element') # depends on [control=['if'], data=[]]
if isinstance(seq, set):
values = list(seq)
return randchoice(values) # depends on [control=['if'], data=[]]
elif isinstance(seq, dict):
indexes = list(seq)
index = randchoice(indexes) # depends on [control=['if'], data=[]]
else:
index = randbelow(len(seq))
return seq[index]
|
def _getJson(url, token='', version=''):
'''for backwards compat, accepting token and version but ignoring'''
if token:
return _getJsonIEXCloud(url, token, version)
return _getJsonOrig(url)
|
def function[_getJson, parameter[url, token, version]]:
constant[for backwards compat, accepting token and version but ignoring]
if name[token] begin[:]
return[call[name[_getJsonIEXCloud], parameter[name[url], name[token], name[version]]]]
return[call[name[_getJsonOrig], parameter[name[url]]]]
|
keyword[def] identifier[_getJson] ( identifier[url] , identifier[token] = literal[string] , identifier[version] = literal[string] ):
literal[string]
keyword[if] identifier[token] :
keyword[return] identifier[_getJsonIEXCloud] ( identifier[url] , identifier[token] , identifier[version] )
keyword[return] identifier[_getJsonOrig] ( identifier[url] )
|
def _getJson(url, token='', version=''):
"""for backwards compat, accepting token and version but ignoring"""
if token:
return _getJsonIEXCloud(url, token, version) # depends on [control=['if'], data=[]]
return _getJsonOrig(url)
|
def _repr_html_(self):
"""
Return a html representation for a particular DataFrame.
Mainly for IPython notebook.
"""
if self._info_repr():
buf = StringIO("")
self.info(buf=buf)
# need to escape the <class>, should be the first line.
val = buf.getvalue().replace('<', r'<', 1)
val = val.replace('>', r'>', 1)
return '<pre>' + val + '</pre>'
if get_option("display.notebook_repr_html"):
max_rows = get_option("display.max_rows")
max_cols = get_option("display.max_columns")
show_dimensions = get_option("display.show_dimensions")
return self.to_html(max_rows=max_rows, max_cols=max_cols,
show_dimensions=show_dimensions, notebook=True)
else:
return None
|
def function[_repr_html_, parameter[self]]:
constant[
Return a html representation for a particular DataFrame.
Mainly for IPython notebook.
]
if call[name[self]._info_repr, parameter[]] begin[:]
variable[buf] assign[=] call[name[StringIO], parameter[constant[]]]
call[name[self].info, parameter[]]
variable[val] assign[=] call[call[name[buf].getvalue, parameter[]].replace, parameter[constant[<], constant[<], constant[1]]]
variable[val] assign[=] call[name[val].replace, parameter[constant[>], constant[>], constant[1]]]
return[binary_operation[binary_operation[constant[<pre>] + name[val]] + constant[</pre>]]]
if call[name[get_option], parameter[constant[display.notebook_repr_html]]] begin[:]
variable[max_rows] assign[=] call[name[get_option], parameter[constant[display.max_rows]]]
variable[max_cols] assign[=] call[name[get_option], parameter[constant[display.max_columns]]]
variable[show_dimensions] assign[=] call[name[get_option], parameter[constant[display.show_dimensions]]]
return[call[name[self].to_html, parameter[]]]
|
keyword[def] identifier[_repr_html_] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_info_repr] ():
identifier[buf] = identifier[StringIO] ( literal[string] )
identifier[self] . identifier[info] ( identifier[buf] = identifier[buf] )
identifier[val] = identifier[buf] . identifier[getvalue] (). identifier[replace] ( literal[string] , literal[string] , literal[int] )
identifier[val] = identifier[val] . identifier[replace] ( literal[string] , literal[string] , literal[int] )
keyword[return] literal[string] + identifier[val] + literal[string]
keyword[if] identifier[get_option] ( literal[string] ):
identifier[max_rows] = identifier[get_option] ( literal[string] )
identifier[max_cols] = identifier[get_option] ( literal[string] )
identifier[show_dimensions] = identifier[get_option] ( literal[string] )
keyword[return] identifier[self] . identifier[to_html] ( identifier[max_rows] = identifier[max_rows] , identifier[max_cols] = identifier[max_cols] ,
identifier[show_dimensions] = identifier[show_dimensions] , identifier[notebook] = keyword[True] )
keyword[else] :
keyword[return] keyword[None]
|
def _repr_html_(self):
"""
Return a html representation for a particular DataFrame.
Mainly for IPython notebook.
"""
if self._info_repr():
buf = StringIO('')
self.info(buf=buf)
# need to escape the <class>, should be the first line.
val = buf.getvalue().replace('<', '<', 1)
val = val.replace('>', '>', 1)
return '<pre>' + val + '</pre>' # depends on [control=['if'], data=[]]
if get_option('display.notebook_repr_html'):
max_rows = get_option('display.max_rows')
max_cols = get_option('display.max_columns')
show_dimensions = get_option('display.show_dimensions')
return self.to_html(max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, notebook=True) # depends on [control=['if'], data=[]]
else:
return None
|
def queries(self, request):
'''Multiple Database Queries'''
queries = self.get_queries(request)
worlds = []
with self.mapper.begin() as session:
for _ in range(queries):
world = session.query(World).get(randint(1, MAXINT))
worlds.append(self.get_json(world))
return Json(worlds).http_response(request)
|
def function[queries, parameter[self, request]]:
constant[Multiple Database Queries]
variable[queries] assign[=] call[name[self].get_queries, parameter[name[request]]]
variable[worlds] assign[=] list[[]]
with call[name[self].mapper.begin, parameter[]] begin[:]
for taget[name[_]] in starred[call[name[range], parameter[name[queries]]]] begin[:]
variable[world] assign[=] call[call[name[session].query, parameter[name[World]]].get, parameter[call[name[randint], parameter[constant[1], name[MAXINT]]]]]
call[name[worlds].append, parameter[call[name[self].get_json, parameter[name[world]]]]]
return[call[call[name[Json], parameter[name[worlds]]].http_response, parameter[name[request]]]]
|
keyword[def] identifier[queries] ( identifier[self] , identifier[request] ):
literal[string]
identifier[queries] = identifier[self] . identifier[get_queries] ( identifier[request] )
identifier[worlds] =[]
keyword[with] identifier[self] . identifier[mapper] . identifier[begin] () keyword[as] identifier[session] :
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[queries] ):
identifier[world] = identifier[session] . identifier[query] ( identifier[World] ). identifier[get] ( identifier[randint] ( literal[int] , identifier[MAXINT] ))
identifier[worlds] . identifier[append] ( identifier[self] . identifier[get_json] ( identifier[world] ))
keyword[return] identifier[Json] ( identifier[worlds] ). identifier[http_response] ( identifier[request] )
|
def queries(self, request):
"""Multiple Database Queries"""
queries = self.get_queries(request)
worlds = []
with self.mapper.begin() as session:
for _ in range(queries):
world = session.query(World).get(randint(1, MAXINT))
worlds.append(self.get_json(world)) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['session']]
return Json(worlds).http_response(request)
|
async def start_pipe_server(
client_connected_cb,
*,
path,
loop=None,
limit=DEFAULT_LIMIT
):
"""
Start listening for connection using Windows named pipes.
"""
path = path.replace('/', '\\')
loop = loop or asyncio.get_event_loop()
def factory():
reader = asyncio.StreamReader(limit=limit, loop=loop)
protocol = asyncio.StreamReaderProtocol(
reader,
client_connected_cb,
loop=loop,
)
return protocol
server, *_ = await loop.start_serving_pipe(factory, address=path)
# The returned instance sadly doesn't have a `wait_closed` method so we add
# one.
closed = asyncio.Event(loop=loop)
original_close = server.close
def close():
original_close()
closed.set()
server.close = close
server.wait_closed = closed.wait
return server
|
<ast.AsyncFunctionDef object at 0x7da1b0aa5510>
|
keyword[async] keyword[def] identifier[start_pipe_server] (
identifier[client_connected_cb] ,
*,
identifier[path] ,
identifier[loop] = keyword[None] ,
identifier[limit] = identifier[DEFAULT_LIMIT]
):
literal[string]
identifier[path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] )
identifier[loop] = identifier[loop] keyword[or] identifier[asyncio] . identifier[get_event_loop] ()
keyword[def] identifier[factory] ():
identifier[reader] = identifier[asyncio] . identifier[StreamReader] ( identifier[limit] = identifier[limit] , identifier[loop] = identifier[loop] )
identifier[protocol] = identifier[asyncio] . identifier[StreamReaderProtocol] (
identifier[reader] ,
identifier[client_connected_cb] ,
identifier[loop] = identifier[loop] ,
)
keyword[return] identifier[protocol]
identifier[server] ,* identifier[_] = keyword[await] identifier[loop] . identifier[start_serving_pipe] ( identifier[factory] , identifier[address] = identifier[path] )
identifier[closed] = identifier[asyncio] . identifier[Event] ( identifier[loop] = identifier[loop] )
identifier[original_close] = identifier[server] . identifier[close]
keyword[def] identifier[close] ():
identifier[original_close] ()
identifier[closed] . identifier[set] ()
identifier[server] . identifier[close] = identifier[close]
identifier[server] . identifier[wait_closed] = identifier[closed] . identifier[wait]
keyword[return] identifier[server]
|
async def start_pipe_server(client_connected_cb, *, path, loop=None, limit=DEFAULT_LIMIT):
"""
Start listening for connection using Windows named pipes.
"""
path = path.replace('/', '\\')
loop = loop or asyncio.get_event_loop()
def factory():
reader = asyncio.StreamReader(limit=limit, loop=loop)
protocol = asyncio.StreamReaderProtocol(reader, client_connected_cb, loop=loop)
return protocol
(server, *_) = await loop.start_serving_pipe(factory, address=path)
# The returned instance sadly doesn't have a `wait_closed` method so we add
# one.
closed = asyncio.Event(loop=loop)
original_close = server.close
def close():
original_close()
closed.set()
server.close = close
server.wait_closed = closed.wait
return server
|
def send_post(self, mri, method_name, **params):
"""Abstract method to dispatch a Post to the server
Args:
mri (str): The mri of the Block
method_name (str): The name of the Method within the Block
params: The parameters to send
Returns:
The return results from the server
"""
typ, parameters = convert_to_type_tuple_value(serialize_object(params))
uri = NTURI(typ[2])
uri = uri.wrap(
path="%s.%s" % (mri, method_name),
kws=parameters,
scheme="pva"
)
value = self._ctxt.rpc(mri, uri, timeout=None)
return convert_value_to_dict(value)
|
def function[send_post, parameter[self, mri, method_name]]:
constant[Abstract method to dispatch a Post to the server
Args:
mri (str): The mri of the Block
method_name (str): The name of the Method within the Block
params: The parameters to send
Returns:
The return results from the server
]
<ast.Tuple object at 0x7da1b05f9f90> assign[=] call[name[convert_to_type_tuple_value], parameter[call[name[serialize_object], parameter[name[params]]]]]
variable[uri] assign[=] call[name[NTURI], parameter[call[name[typ]][constant[2]]]]
variable[uri] assign[=] call[name[uri].wrap, parameter[]]
variable[value] assign[=] call[name[self]._ctxt.rpc, parameter[name[mri], name[uri]]]
return[call[name[convert_value_to_dict], parameter[name[value]]]]
|
keyword[def] identifier[send_post] ( identifier[self] , identifier[mri] , identifier[method_name] ,** identifier[params] ):
literal[string]
identifier[typ] , identifier[parameters] = identifier[convert_to_type_tuple_value] ( identifier[serialize_object] ( identifier[params] ))
identifier[uri] = identifier[NTURI] ( identifier[typ] [ literal[int] ])
identifier[uri] = identifier[uri] . identifier[wrap] (
identifier[path] = literal[string] %( identifier[mri] , identifier[method_name] ),
identifier[kws] = identifier[parameters] ,
identifier[scheme] = literal[string]
)
identifier[value] = identifier[self] . identifier[_ctxt] . identifier[rpc] ( identifier[mri] , identifier[uri] , identifier[timeout] = keyword[None] )
keyword[return] identifier[convert_value_to_dict] ( identifier[value] )
|
def send_post(self, mri, method_name, **params):
"""Abstract method to dispatch a Post to the server
Args:
mri (str): The mri of the Block
method_name (str): The name of the Method within the Block
params: The parameters to send
Returns:
The return results from the server
"""
(typ, parameters) = convert_to_type_tuple_value(serialize_object(params))
uri = NTURI(typ[2])
uri = uri.wrap(path='%s.%s' % (mri, method_name), kws=parameters, scheme='pva')
value = self._ctxt.rpc(mri, uri, timeout=None)
return convert_value_to_dict(value)
|
def to_dict(self, fields=None):
"""Convert context to dict containing only builtin types.
Args:
fields (list of str): If present, only write these fields into the
dict. This can be used to avoid constructing expensive fields
(such as 'graph') for some cases.
Returns:
dict: Dictified context.
"""
data = {}
def _add(field):
return (fields is None or field in fields)
if _add("resolved_packages"):
resolved_packages = []
for pkg in (self._resolved_packages or []):
resolved_packages.append(pkg.handle.to_dict())
data["resolved_packages"] = resolved_packages
if _add("serialize_version"):
data["serialize_version"] = \
'.'.join(map(str, ResolvedContext.serialize_version))
if _add("patch_locks"):
data["patch_locks"] = dict((k, v.name) for k, v in self.patch_locks)
if _add("package_orderers"):
package_orderers = [package_order.to_pod(x)
for x in (self.package_orderers or [])]
data["package_orderers"] = package_orderers or None
if _add("package_filter"):
data["package_filter"] = self.package_filter.to_pod()
if _add("graph"):
if self.graph_string and self.graph_string.startswith('{'):
graph_str = self.graph_string # already in compact format
else:
g = self.graph()
graph_str = write_compacted(g)
data["graph"] = graph_str
data.update(dict(
timestamp=self.timestamp,
requested_timestamp=self.requested_timestamp,
building=self.building,
caching=self.caching,
implicit_packages=map(str, self.implicit_packages),
package_requests=map(str, self._package_requests),
package_paths=self.package_paths,
default_patch_lock=self.default_patch_lock.name,
rez_version=self.rez_version,
rez_path=self.rez_path,
user=self.user,
host=self.host,
platform=self.platform,
arch=self.arch,
os=self.os,
created=self.created,
parent_suite_path=self.parent_suite_path,
suite_context_name=self.suite_context_name,
status=self.status_.name,
failure_description=self.failure_description,
from_cache=self.from_cache,
solve_time=self.solve_time,
load_time=self.load_time,
num_loaded_packages=self.num_loaded_packages
))
if fields:
data = dict((k, v) for k, v in data.iteritems() if k in fields)
return data
|
def function[to_dict, parameter[self, fields]]:
constant[Convert context to dict containing only builtin types.
Args:
fields (list of str): If present, only write these fields into the
dict. This can be used to avoid constructing expensive fields
(such as 'graph') for some cases.
Returns:
dict: Dictified context.
]
variable[data] assign[=] dictionary[[], []]
def function[_add, parameter[field]]:
return[<ast.BoolOp object at 0x7da1b18a0d90>]
if call[name[_add], parameter[constant[resolved_packages]]] begin[:]
variable[resolved_packages] assign[=] list[[]]
for taget[name[pkg]] in starred[<ast.BoolOp object at 0x7da1b18a1180>] begin[:]
call[name[resolved_packages].append, parameter[call[name[pkg].handle.to_dict, parameter[]]]]
call[name[data]][constant[resolved_packages]] assign[=] name[resolved_packages]
if call[name[_add], parameter[constant[serialize_version]]] begin[:]
call[name[data]][constant[serialize_version]] assign[=] call[constant[.].join, parameter[call[name[map], parameter[name[str], name[ResolvedContext].serialize_version]]]]
if call[name[_add], parameter[constant[patch_locks]]] begin[:]
call[name[data]][constant[patch_locks]] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b18a2770>]]
if call[name[_add], parameter[constant[package_orderers]]] begin[:]
variable[package_orderers] assign[=] <ast.ListComp object at 0x7da1b18a2fe0>
call[name[data]][constant[package_orderers]] assign[=] <ast.BoolOp object at 0x7da1b18a1720>
if call[name[_add], parameter[constant[package_filter]]] begin[:]
call[name[data]][constant[package_filter]] assign[=] call[name[self].package_filter.to_pod, parameter[]]
if call[name[_add], parameter[constant[graph]]] begin[:]
if <ast.BoolOp object at 0x7da1b18a3ca0> begin[:]
variable[graph_str] assign[=] name[self].graph_string
call[name[data]][constant[graph]] assign[=] name[graph_str]
call[name[data].update, parameter[call[name[dict], parameter[]]]]
if name[fields] begin[:]
variable[data] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b175ef20>]]
return[name[data]]
|
keyword[def] identifier[to_dict] ( identifier[self] , identifier[fields] = keyword[None] ):
literal[string]
identifier[data] ={}
keyword[def] identifier[_add] ( identifier[field] ):
keyword[return] ( identifier[fields] keyword[is] keyword[None] keyword[or] identifier[field] keyword[in] identifier[fields] )
keyword[if] identifier[_add] ( literal[string] ):
identifier[resolved_packages] =[]
keyword[for] identifier[pkg] keyword[in] ( identifier[self] . identifier[_resolved_packages] keyword[or] []):
identifier[resolved_packages] . identifier[append] ( identifier[pkg] . identifier[handle] . identifier[to_dict] ())
identifier[data] [ literal[string] ]= identifier[resolved_packages]
keyword[if] identifier[_add] ( literal[string] ):
identifier[data] [ literal[string] ]= literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[ResolvedContext] . identifier[serialize_version] ))
keyword[if] identifier[_add] ( literal[string] ):
identifier[data] [ literal[string] ]= identifier[dict] (( identifier[k] , identifier[v] . identifier[name] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[patch_locks] )
keyword[if] identifier[_add] ( literal[string] ):
identifier[package_orderers] =[ identifier[package_order] . identifier[to_pod] ( identifier[x] )
keyword[for] identifier[x] keyword[in] ( identifier[self] . identifier[package_orderers] keyword[or] [])]
identifier[data] [ literal[string] ]= identifier[package_orderers] keyword[or] keyword[None]
keyword[if] identifier[_add] ( literal[string] ):
identifier[data] [ literal[string] ]= identifier[self] . identifier[package_filter] . identifier[to_pod] ()
keyword[if] identifier[_add] ( literal[string] ):
keyword[if] identifier[self] . identifier[graph_string] keyword[and] identifier[self] . identifier[graph_string] . identifier[startswith] ( literal[string] ):
identifier[graph_str] = identifier[self] . identifier[graph_string]
keyword[else] :
identifier[g] = identifier[self] . identifier[graph] ()
identifier[graph_str] = identifier[write_compacted] ( identifier[g] )
identifier[data] [ literal[string] ]= identifier[graph_str]
identifier[data] . identifier[update] ( identifier[dict] (
identifier[timestamp] = identifier[self] . identifier[timestamp] ,
identifier[requested_timestamp] = identifier[self] . identifier[requested_timestamp] ,
identifier[building] = identifier[self] . identifier[building] ,
identifier[caching] = identifier[self] . identifier[caching] ,
identifier[implicit_packages] = identifier[map] ( identifier[str] , identifier[self] . identifier[implicit_packages] ),
identifier[package_requests] = identifier[map] ( identifier[str] , identifier[self] . identifier[_package_requests] ),
identifier[package_paths] = identifier[self] . identifier[package_paths] ,
identifier[default_patch_lock] = identifier[self] . identifier[default_patch_lock] . identifier[name] ,
identifier[rez_version] = identifier[self] . identifier[rez_version] ,
identifier[rez_path] = identifier[self] . identifier[rez_path] ,
identifier[user] = identifier[self] . identifier[user] ,
identifier[host] = identifier[self] . identifier[host] ,
identifier[platform] = identifier[self] . identifier[platform] ,
identifier[arch] = identifier[self] . identifier[arch] ,
identifier[os] = identifier[self] . identifier[os] ,
identifier[created] = identifier[self] . identifier[created] ,
identifier[parent_suite_path] = identifier[self] . identifier[parent_suite_path] ,
identifier[suite_context_name] = identifier[self] . identifier[suite_context_name] ,
identifier[status] = identifier[self] . identifier[status_] . identifier[name] ,
identifier[failure_description] = identifier[self] . identifier[failure_description] ,
identifier[from_cache] = identifier[self] . identifier[from_cache] ,
identifier[solve_time] = identifier[self] . identifier[solve_time] ,
identifier[load_time] = identifier[self] . identifier[load_time] ,
identifier[num_loaded_packages] = identifier[self] . identifier[num_loaded_packages]
))
keyword[if] identifier[fields] :
identifier[data] = identifier[dict] (( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[data] . identifier[iteritems] () keyword[if] identifier[k] keyword[in] identifier[fields] )
keyword[return] identifier[data]
|
def to_dict(self, fields=None):
"""Convert context to dict containing only builtin types.
Args:
fields (list of str): If present, only write these fields into the
dict. This can be used to avoid constructing expensive fields
(such as 'graph') for some cases.
Returns:
dict: Dictified context.
"""
data = {}
def _add(field):
return fields is None or field in fields
if _add('resolved_packages'):
resolved_packages = []
for pkg in self._resolved_packages or []:
resolved_packages.append(pkg.handle.to_dict()) # depends on [control=['for'], data=['pkg']]
data['resolved_packages'] = resolved_packages # depends on [control=['if'], data=[]]
if _add('serialize_version'):
data['serialize_version'] = '.'.join(map(str, ResolvedContext.serialize_version)) # depends on [control=['if'], data=[]]
if _add('patch_locks'):
data['patch_locks'] = dict(((k, v.name) for (k, v) in self.patch_locks)) # depends on [control=['if'], data=[]]
if _add('package_orderers'):
package_orderers = [package_order.to_pod(x) for x in self.package_orderers or []]
data['package_orderers'] = package_orderers or None # depends on [control=['if'], data=[]]
if _add('package_filter'):
data['package_filter'] = self.package_filter.to_pod() # depends on [control=['if'], data=[]]
if _add('graph'):
if self.graph_string and self.graph_string.startswith('{'):
graph_str = self.graph_string # already in compact format # depends on [control=['if'], data=[]]
else:
g = self.graph()
graph_str = write_compacted(g)
data['graph'] = graph_str # depends on [control=['if'], data=[]]
data.update(dict(timestamp=self.timestamp, requested_timestamp=self.requested_timestamp, building=self.building, caching=self.caching, implicit_packages=map(str, self.implicit_packages), package_requests=map(str, self._package_requests), package_paths=self.package_paths, default_patch_lock=self.default_patch_lock.name, rez_version=self.rez_version, rez_path=self.rez_path, user=self.user, host=self.host, platform=self.platform, arch=self.arch, os=self.os, created=self.created, parent_suite_path=self.parent_suite_path, suite_context_name=self.suite_context_name, status=self.status_.name, failure_description=self.failure_description, from_cache=self.from_cache, solve_time=self.solve_time, load_time=self.load_time, num_loaded_packages=self.num_loaded_packages))
if fields:
data = dict(((k, v) for (k, v) in data.iteritems() if k in fields)) # depends on [control=['if'], data=[]]
return data
|
def set_features(self, features):
"""Set features in the disco#info object.
All existing features are removed from `self`.
:Parameters:
- `features`: list of features.
:Types:
- `features`: sequence of `unicode`
"""
for var in self.features:
self.remove_feature(var)
for var in features:
self.add_feature(var)
|
def function[set_features, parameter[self, features]]:
constant[Set features in the disco#info object.
All existing features are removed from `self`.
:Parameters:
- `features`: list of features.
:Types:
- `features`: sequence of `unicode`
]
for taget[name[var]] in starred[name[self].features] begin[:]
call[name[self].remove_feature, parameter[name[var]]]
for taget[name[var]] in starred[name[features]] begin[:]
call[name[self].add_feature, parameter[name[var]]]
|
keyword[def] identifier[set_features] ( identifier[self] , identifier[features] ):
literal[string]
keyword[for] identifier[var] keyword[in] identifier[self] . identifier[features] :
identifier[self] . identifier[remove_feature] ( identifier[var] )
keyword[for] identifier[var] keyword[in] identifier[features] :
identifier[self] . identifier[add_feature] ( identifier[var] )
|
def set_features(self, features):
"""Set features in the disco#info object.
All existing features are removed from `self`.
:Parameters:
- `features`: list of features.
:Types:
- `features`: sequence of `unicode`
"""
for var in self.features:
self.remove_feature(var) # depends on [control=['for'], data=['var']]
for var in features:
self.add_feature(var) # depends on [control=['for'], data=['var']]
|
def stmt2enum(enum_type, declare=True, assign=True, wrap=True):
"""Returns a dzn enum declaration from an enum type.
Parameters
----------
enum_type : Enum
The enum to serialize.
declare : bool
Whether to include the ``enum`` declatation keyword in the statement or
just the assignment.
assign : bool
Wheter to include the assignment of the enum in the statement or just
the declaration.
wrap : bool
Whether to wrap the serialized enum.
Returns
-------
str
The serialized dzn representation of the enum.
"""
if not (declare or assign):
raise ValueError(
'The statement must be a declaration or an assignment.'
)
stmt = []
if declare:
stmt.append('enum ')
stmt.append(enum_type.__name__)
if assign:
val_str = []
for v in list(enum_type):
val_str.append(v.name)
val_str = ''.join(['{', ','.join(val_str), '}'])
if wrap:
wrapper = _get_wrapper()
val_str = wrapper.fill(val_str)
stmt.append(' = {}'.format(val_str))
stmt.append(';')
return ''.join(stmt)
|
def function[stmt2enum, parameter[enum_type, declare, assign, wrap]]:
constant[Returns a dzn enum declaration from an enum type.
Parameters
----------
enum_type : Enum
The enum to serialize.
declare : bool
Whether to include the ``enum`` declatation keyword in the statement or
just the assignment.
assign : bool
Wheter to include the assignment of the enum in the statement or just
the declaration.
wrap : bool
Whether to wrap the serialized enum.
Returns
-------
str
The serialized dzn representation of the enum.
]
if <ast.UnaryOp object at 0x7da20c7cb5e0> begin[:]
<ast.Raise object at 0x7da20c7c92d0>
variable[stmt] assign[=] list[[]]
if name[declare] begin[:]
call[name[stmt].append, parameter[constant[enum ]]]
call[name[stmt].append, parameter[name[enum_type].__name__]]
if name[assign] begin[:]
variable[val_str] assign[=] list[[]]
for taget[name[v]] in starred[call[name[list], parameter[name[enum_type]]]] begin[:]
call[name[val_str].append, parameter[name[v].name]]
variable[val_str] assign[=] call[constant[].join, parameter[list[[<ast.Constant object at 0x7da20c7c85e0>, <ast.Call object at 0x7da20c7c9d20>, <ast.Constant object at 0x7da20c7cae00>]]]]
if name[wrap] begin[:]
variable[wrapper] assign[=] call[name[_get_wrapper], parameter[]]
variable[val_str] assign[=] call[name[wrapper].fill, parameter[name[val_str]]]
call[name[stmt].append, parameter[call[constant[ = {}].format, parameter[name[val_str]]]]]
call[name[stmt].append, parameter[constant[;]]]
return[call[constant[].join, parameter[name[stmt]]]]
|
keyword[def] identifier[stmt2enum] ( identifier[enum_type] , identifier[declare] = keyword[True] , identifier[assign] = keyword[True] , identifier[wrap] = keyword[True] ):
literal[string]
keyword[if] keyword[not] ( identifier[declare] keyword[or] identifier[assign] ):
keyword[raise] identifier[ValueError] (
literal[string]
)
identifier[stmt] =[]
keyword[if] identifier[declare] :
identifier[stmt] . identifier[append] ( literal[string] )
identifier[stmt] . identifier[append] ( identifier[enum_type] . identifier[__name__] )
keyword[if] identifier[assign] :
identifier[val_str] =[]
keyword[for] identifier[v] keyword[in] identifier[list] ( identifier[enum_type] ):
identifier[val_str] . identifier[append] ( identifier[v] . identifier[name] )
identifier[val_str] = literal[string] . identifier[join] ([ literal[string] , literal[string] . identifier[join] ( identifier[val_str] ), literal[string] ])
keyword[if] identifier[wrap] :
identifier[wrapper] = identifier[_get_wrapper] ()
identifier[val_str] = identifier[wrapper] . identifier[fill] ( identifier[val_str] )
identifier[stmt] . identifier[append] ( literal[string] . identifier[format] ( identifier[val_str] ))
identifier[stmt] . identifier[append] ( literal[string] )
keyword[return] literal[string] . identifier[join] ( identifier[stmt] )
|
def stmt2enum(enum_type, declare=True, assign=True, wrap=True):
"""Returns a dzn enum declaration from an enum type.
Parameters
----------
enum_type : Enum
The enum to serialize.
declare : bool
Whether to include the ``enum`` declatation keyword in the statement or
just the assignment.
assign : bool
Wheter to include the assignment of the enum in the statement or just
the declaration.
wrap : bool
Whether to wrap the serialized enum.
Returns
-------
str
The serialized dzn representation of the enum.
"""
if not (declare or assign):
raise ValueError('The statement must be a declaration or an assignment.') # depends on [control=['if'], data=[]]
stmt = []
if declare:
stmt.append('enum ') # depends on [control=['if'], data=[]]
stmt.append(enum_type.__name__)
if assign:
val_str = []
for v in list(enum_type):
val_str.append(v.name) # depends on [control=['for'], data=['v']]
val_str = ''.join(['{', ','.join(val_str), '}'])
if wrap:
wrapper = _get_wrapper()
val_str = wrapper.fill(val_str) # depends on [control=['if'], data=[]]
stmt.append(' = {}'.format(val_str)) # depends on [control=['if'], data=[]]
stmt.append(';')
return ''.join(stmt)
|
def as_dict(self):
"""
Serializes the object necessary data in a dictionary.
:returns: Serialized data in a dictionary.
:rtype: dict
"""
result_dict = super(Profile, self).as_dict()
statuses = list()
version = None
titles = list()
descriptions = list()
platforms = list()
selects = list()
for child in self.children:
if isinstance(child, Version):
version = child.as_dict()
elif isinstance(child, Status):
statuses.append(child.as_dict())
elif isinstance(child, Title):
titles.append(child.as_dict())
elif isinstance(child, Description):
descriptions.append(child.as_dict())
elif isinstance(child, Platform):
platforms.append(child.as_dict())
elif isinstance(child, Select):
selects.append(child.as_dict())
if version is not None:
result_dict['version'] = version
if len(statuses) > 0:
result_dict['statuses'] = statuses
if len(titles) > 0:
result_dict['titles'] = titles
if len(descriptions) > 0:
result_dict['descriptions'] = descriptions
if len(platforms) > 0:
result_dict['platforms'] = platforms
if len(selects) > 0:
result_dict['selects'] = selects
return result_dict
|
def function[as_dict, parameter[self]]:
constant[
Serializes the object necessary data in a dictionary.
:returns: Serialized data in a dictionary.
:rtype: dict
]
variable[result_dict] assign[=] call[call[name[super], parameter[name[Profile], name[self]]].as_dict, parameter[]]
variable[statuses] assign[=] call[name[list], parameter[]]
variable[version] assign[=] constant[None]
variable[titles] assign[=] call[name[list], parameter[]]
variable[descriptions] assign[=] call[name[list], parameter[]]
variable[platforms] assign[=] call[name[list], parameter[]]
variable[selects] assign[=] call[name[list], parameter[]]
for taget[name[child]] in starred[name[self].children] begin[:]
if call[name[isinstance], parameter[name[child], name[Version]]] begin[:]
variable[version] assign[=] call[name[child].as_dict, parameter[]]
if compare[name[version] is_not constant[None]] begin[:]
call[name[result_dict]][constant[version]] assign[=] name[version]
if compare[call[name[len], parameter[name[statuses]]] greater[>] constant[0]] begin[:]
call[name[result_dict]][constant[statuses]] assign[=] name[statuses]
if compare[call[name[len], parameter[name[titles]]] greater[>] constant[0]] begin[:]
call[name[result_dict]][constant[titles]] assign[=] name[titles]
if compare[call[name[len], parameter[name[descriptions]]] greater[>] constant[0]] begin[:]
call[name[result_dict]][constant[descriptions]] assign[=] name[descriptions]
if compare[call[name[len], parameter[name[platforms]]] greater[>] constant[0]] begin[:]
call[name[result_dict]][constant[platforms]] assign[=] name[platforms]
if compare[call[name[len], parameter[name[selects]]] greater[>] constant[0]] begin[:]
call[name[result_dict]][constant[selects]] assign[=] name[selects]
return[name[result_dict]]
|
keyword[def] identifier[as_dict] ( identifier[self] ):
literal[string]
identifier[result_dict] = identifier[super] ( identifier[Profile] , identifier[self] ). identifier[as_dict] ()
identifier[statuses] = identifier[list] ()
identifier[version] = keyword[None]
identifier[titles] = identifier[list] ()
identifier[descriptions] = identifier[list] ()
identifier[platforms] = identifier[list] ()
identifier[selects] = identifier[list] ()
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] :
keyword[if] identifier[isinstance] ( identifier[child] , identifier[Version] ):
identifier[version] = identifier[child] . identifier[as_dict] ()
keyword[elif] identifier[isinstance] ( identifier[child] , identifier[Status] ):
identifier[statuses] . identifier[append] ( identifier[child] . identifier[as_dict] ())
keyword[elif] identifier[isinstance] ( identifier[child] , identifier[Title] ):
identifier[titles] . identifier[append] ( identifier[child] . identifier[as_dict] ())
keyword[elif] identifier[isinstance] ( identifier[child] , identifier[Description] ):
identifier[descriptions] . identifier[append] ( identifier[child] . identifier[as_dict] ())
keyword[elif] identifier[isinstance] ( identifier[child] , identifier[Platform] ):
identifier[platforms] . identifier[append] ( identifier[child] . identifier[as_dict] ())
keyword[elif] identifier[isinstance] ( identifier[child] , identifier[Select] ):
identifier[selects] . identifier[append] ( identifier[child] . identifier[as_dict] ())
keyword[if] identifier[version] keyword[is] keyword[not] keyword[None] :
identifier[result_dict] [ literal[string] ]= identifier[version]
keyword[if] identifier[len] ( identifier[statuses] )> literal[int] :
identifier[result_dict] [ literal[string] ]= identifier[statuses]
keyword[if] identifier[len] ( identifier[titles] )> literal[int] :
identifier[result_dict] [ literal[string] ]= identifier[titles]
keyword[if] identifier[len] ( identifier[descriptions] )> literal[int] :
identifier[result_dict] [ literal[string] ]= identifier[descriptions]
keyword[if] identifier[len] ( identifier[platforms] )> literal[int] :
identifier[result_dict] [ literal[string] ]= identifier[platforms]
keyword[if] identifier[len] ( identifier[selects] )> literal[int] :
identifier[result_dict] [ literal[string] ]= identifier[selects]
keyword[return] identifier[result_dict]
|
def as_dict(self):
"""
Serializes the object necessary data in a dictionary.
:returns: Serialized data in a dictionary.
:rtype: dict
"""
result_dict = super(Profile, self).as_dict()
statuses = list()
version = None
titles = list()
descriptions = list()
platforms = list()
selects = list()
for child in self.children:
if isinstance(child, Version):
version = child.as_dict() # depends on [control=['if'], data=[]]
elif isinstance(child, Status):
statuses.append(child.as_dict()) # depends on [control=['if'], data=[]]
elif isinstance(child, Title):
titles.append(child.as_dict()) # depends on [control=['if'], data=[]]
elif isinstance(child, Description):
descriptions.append(child.as_dict()) # depends on [control=['if'], data=[]]
elif isinstance(child, Platform):
platforms.append(child.as_dict()) # depends on [control=['if'], data=[]]
elif isinstance(child, Select):
selects.append(child.as_dict()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child']]
if version is not None:
result_dict['version'] = version # depends on [control=['if'], data=['version']]
if len(statuses) > 0:
result_dict['statuses'] = statuses # depends on [control=['if'], data=[]]
if len(titles) > 0:
result_dict['titles'] = titles # depends on [control=['if'], data=[]]
if len(descriptions) > 0:
result_dict['descriptions'] = descriptions # depends on [control=['if'], data=[]]
if len(platforms) > 0:
result_dict['platforms'] = platforms # depends on [control=['if'], data=[]]
if len(selects) > 0:
result_dict['selects'] = selects # depends on [control=['if'], data=[]]
return result_dict
|
def import_by_path(path):
"""
Import functions or class by their path. Should be of the form:
path.to.module.func
"""
if not isinstance(path, str):
return path
module_path, *name = path.rsplit('.', 1)
func = import_module(module_path)
if name:
func = getattr(func, name[0])
return func
|
def function[import_by_path, parameter[path]]:
constant[
Import functions or class by their path. Should be of the form:
path.to.module.func
]
if <ast.UnaryOp object at 0x7da1b0399000> begin[:]
return[name[path]]
<ast.Tuple object at 0x7da1b039ace0> assign[=] call[name[path].rsplit, parameter[constant[.], constant[1]]]
variable[func] assign[=] call[name[import_module], parameter[name[module_path]]]
if name[name] begin[:]
variable[func] assign[=] call[name[getattr], parameter[name[func], call[name[name]][constant[0]]]]
return[name[func]]
|
keyword[def] identifier[import_by_path] ( identifier[path] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[path] , identifier[str] ):
keyword[return] identifier[path]
identifier[module_path] ,* identifier[name] = identifier[path] . identifier[rsplit] ( literal[string] , literal[int] )
identifier[func] = identifier[import_module] ( identifier[module_path] )
keyword[if] identifier[name] :
identifier[func] = identifier[getattr] ( identifier[func] , identifier[name] [ literal[int] ])
keyword[return] identifier[func]
|
def import_by_path(path):
"""
Import functions or class by their path. Should be of the form:
path.to.module.func
"""
if not isinstance(path, str):
return path # depends on [control=['if'], data=[]]
(module_path, *name) = path.rsplit('.', 1)
func = import_module(module_path)
if name:
func = getattr(func, name[0]) # depends on [control=['if'], data=[]]
return func
|
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
must be specified. Encodings that are not ASCII based (such as UCS-2)
are not allowed and should be decoded to ``unicode`` first.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
``parse_float``, if specified, will be called with the string
of every JSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for JSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every JSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for JSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN, null, true, false.
This can be used to raise an exception if invalid JSON numbers
are encountered.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
return cls(encoding=encoding, **kw).decode(s)
|
def function[loads, parameter[s, encoding, cls, object_hook, parse_float, parse_int, parse_constant]]:
constant[Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
must be specified. Encodings that are not ASCII based (such as UCS-2)
are not allowed and should be decoded to ``unicode`` first.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
``parse_float``, if specified, will be called with the string
of every JSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for JSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every JSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for JSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN, null, true, false.
This can be used to raise an exception if invalid JSON numbers
are encountered.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
]
if <ast.BoolOp object at 0x7da1b1dd1ab0> begin[:]
return[call[name[_default_decoder].decode, parameter[name[s]]]]
if compare[name[cls] is constant[None]] begin[:]
variable[cls] assign[=] name[JSONDecoder]
if compare[name[object_hook] is_not constant[None]] begin[:]
call[name[kw]][constant[object_hook]] assign[=] name[object_hook]
if compare[name[parse_float] is_not constant[None]] begin[:]
call[name[kw]][constant[parse_float]] assign[=] name[parse_float]
if compare[name[parse_int] is_not constant[None]] begin[:]
call[name[kw]][constant[parse_int]] assign[=] name[parse_int]
if compare[name[parse_constant] is_not constant[None]] begin[:]
call[name[kw]][constant[parse_constant]] assign[=] name[parse_constant]
return[call[call[name[cls], parameter[]].decode, parameter[name[s]]]]
|
keyword[def] identifier[loads] ( identifier[s] , identifier[encoding] = keyword[None] , identifier[cls] = keyword[None] , identifier[object_hook] = keyword[None] , identifier[parse_float] = keyword[None] ,
identifier[parse_int] = keyword[None] , identifier[parse_constant] = keyword[None] ,** identifier[kw] ):
literal[string]
keyword[if] ( identifier[cls] keyword[is] keyword[None] keyword[and] identifier[encoding] keyword[is] keyword[None] keyword[and] identifier[object_hook] keyword[is] keyword[None] keyword[and]
identifier[parse_int] keyword[is] keyword[None] keyword[and] identifier[parse_float] keyword[is] keyword[None] keyword[and]
identifier[parse_constant] keyword[is] keyword[None] keyword[and] keyword[not] identifier[kw] ):
keyword[return] identifier[_default_decoder] . identifier[decode] ( identifier[s] )
keyword[if] identifier[cls] keyword[is] keyword[None] :
identifier[cls] = identifier[JSONDecoder]
keyword[if] identifier[object_hook] keyword[is] keyword[not] keyword[None] :
identifier[kw] [ literal[string] ]= identifier[object_hook]
keyword[if] identifier[parse_float] keyword[is] keyword[not] keyword[None] :
identifier[kw] [ literal[string] ]= identifier[parse_float]
keyword[if] identifier[parse_int] keyword[is] keyword[not] keyword[None] :
identifier[kw] [ literal[string] ]= identifier[parse_int]
keyword[if] identifier[parse_constant] keyword[is] keyword[not] keyword[None] :
identifier[kw] [ literal[string] ]= identifier[parse_constant]
keyword[return] identifier[cls] ( identifier[encoding] = identifier[encoding] ,** identifier[kw] ). identifier[decode] ( identifier[s] )
|
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
must be specified. Encodings that are not ASCII based (such as UCS-2)
are not allowed and should be decoded to ``unicode`` first.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
``parse_float``, if specified, will be called with the string
of every JSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for JSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every JSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for JSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN, null, true, false.
This can be used to raise an exception if invalid JSON numbers
are encountered.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
if cls is None and encoding is None and (object_hook is None) and (parse_int is None) and (parse_float is None) and (parse_constant is None) and (not kw):
return _default_decoder.decode(s) # depends on [control=['if'], data=[]]
if cls is None:
cls = JSONDecoder # depends on [control=['if'], data=['cls']]
if object_hook is not None:
kw['object_hook'] = object_hook # depends on [control=['if'], data=['object_hook']]
if parse_float is not None:
kw['parse_float'] = parse_float # depends on [control=['if'], data=['parse_float']]
if parse_int is not None:
kw['parse_int'] = parse_int # depends on [control=['if'], data=['parse_int']]
if parse_constant is not None:
kw['parse_constant'] = parse_constant # depends on [control=['if'], data=['parse_constant']]
return cls(encoding=encoding, **kw).decode(s)
|
def content_type(self, response):
"""
Ensures the response is of type
:param obj response: The scrapy response
:return bool: Determines wether the response is of the correct type
"""
if not re_html.match(response.headers.get('Content-Type').decode('utf-8')):
self.log.warn(
"Dropped: %s's content is not of type "
"text/html but %s", response.url, response.headers.get('Content-Type')
)
return False
else:
return True
|
def function[content_type, parameter[self, response]]:
constant[
Ensures the response is of type
:param obj response: The scrapy response
:return bool: Determines wether the response is of the correct type
]
if <ast.UnaryOp object at 0x7da20c992830> begin[:]
call[name[self].log.warn, parameter[constant[Dropped: %s's content is not of type text/html but %s], name[response].url, call[name[response].headers.get, parameter[constant[Content-Type]]]]]
return[constant[False]]
|
keyword[def] identifier[content_type] ( identifier[self] , identifier[response] ):
literal[string]
keyword[if] keyword[not] identifier[re_html] . identifier[match] ( identifier[response] . identifier[headers] . identifier[get] ( literal[string] ). identifier[decode] ( literal[string] )):
identifier[self] . identifier[log] . identifier[warn] (
literal[string]
literal[string] , identifier[response] . identifier[url] , identifier[response] . identifier[headers] . identifier[get] ( literal[string] )
)
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[True]
|
def content_type(self, response):
"""
Ensures the response is of type
:param obj response: The scrapy response
:return bool: Determines wether the response is of the correct type
"""
if not re_html.match(response.headers.get('Content-Type').decode('utf-8')):
self.log.warn("Dropped: %s's content is not of type text/html but %s", response.url, response.headers.get('Content-Type'))
return False # depends on [control=['if'], data=[]]
else:
return True
|
def xhdr(self, header, msgid_range=None):
"""XHDR command.
"""
args = header
if range is not None:
args += " " + utils.unparse_msgid_range(msgid_range)
code, message = self.command("XHDR", args)
if code != 221:
raise NNTPReplyError(code, message)
return self.info(code, message)
|
def function[xhdr, parameter[self, header, msgid_range]]:
constant[XHDR command.
]
variable[args] assign[=] name[header]
if compare[name[range] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b013d8a0>
<ast.Tuple object at 0x7da1b013c8b0> assign[=] call[name[self].command, parameter[constant[XHDR], name[args]]]
if compare[name[code] not_equal[!=] constant[221]] begin[:]
<ast.Raise object at 0x7da1b013db70>
return[call[name[self].info, parameter[name[code], name[message]]]]
|
keyword[def] identifier[xhdr] ( identifier[self] , identifier[header] , identifier[msgid_range] = keyword[None] ):
literal[string]
identifier[args] = identifier[header]
keyword[if] identifier[range] keyword[is] keyword[not] keyword[None] :
identifier[args] += literal[string] + identifier[utils] . identifier[unparse_msgid_range] ( identifier[msgid_range] )
identifier[code] , identifier[message] = identifier[self] . identifier[command] ( literal[string] , identifier[args] )
keyword[if] identifier[code] != literal[int] :
keyword[raise] identifier[NNTPReplyError] ( identifier[code] , identifier[message] )
keyword[return] identifier[self] . identifier[info] ( identifier[code] , identifier[message] )
|
def xhdr(self, header, msgid_range=None):
"""XHDR command.
"""
args = header
if range is not None:
args += ' ' + utils.unparse_msgid_range(msgid_range) # depends on [control=['if'], data=[]]
(code, message) = self.command('XHDR', args)
if code != 221:
raise NNTPReplyError(code, message) # depends on [control=['if'], data=['code']]
return self.info(code, message)
|
def say(self, message=None, voice=None, loop=None, language=None, **kwargs):
"""
Create a <Say> element
:param message: Message to say
:param voice: Voice to use
:param loop: Times to loop message
:param language: Message langauge
:param kwargs: additional attributes
:returns: <Say> element
"""
return self.nest(Say(message=message, voice=voice, loop=loop, language=language, **kwargs))
|
def function[say, parameter[self, message, voice, loop, language]]:
constant[
Create a <Say> element
:param message: Message to say
:param voice: Voice to use
:param loop: Times to loop message
:param language: Message langauge
:param kwargs: additional attributes
:returns: <Say> element
]
return[call[name[self].nest, parameter[call[name[Say], parameter[]]]]]
|
keyword[def] identifier[say] ( identifier[self] , identifier[message] = keyword[None] , identifier[voice] = keyword[None] , identifier[loop] = keyword[None] , identifier[language] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[nest] ( identifier[Say] ( identifier[message] = identifier[message] , identifier[voice] = identifier[voice] , identifier[loop] = identifier[loop] , identifier[language] = identifier[language] ,** identifier[kwargs] ))
|
def say(self, message=None, voice=None, loop=None, language=None, **kwargs):
"""
Create a <Say> element
:param message: Message to say
:param voice: Voice to use
:param loop: Times to loop message
:param language: Message langauge
:param kwargs: additional attributes
:returns: <Say> element
"""
return self.nest(Say(message=message, voice=voice, loop=loop, language=language, **kwargs))
|
def calc_wind_chill(t, windspeed, windspeed10min=None):
'''
calculates the wind chill value based upon the temperature (F) and
wind.
returns the wind chill in degrees F.
'''
w = max(windspeed10min, windspeed)
return 35.74 + 0.6215 * t - 35.75 * (w ** 0.16) + 0.4275 * t * (w ** 0.16);
|
def function[calc_wind_chill, parameter[t, windspeed, windspeed10min]]:
constant[
calculates the wind chill value based upon the temperature (F) and
wind.
returns the wind chill in degrees F.
]
variable[w] assign[=] call[name[max], parameter[name[windspeed10min], name[windspeed]]]
return[binary_operation[binary_operation[binary_operation[constant[35.74] + binary_operation[constant[0.6215] * name[t]]] - binary_operation[constant[35.75] * binary_operation[name[w] ** constant[0.16]]]] + binary_operation[binary_operation[constant[0.4275] * name[t]] * binary_operation[name[w] ** constant[0.16]]]]]
|
keyword[def] identifier[calc_wind_chill] ( identifier[t] , identifier[windspeed] , identifier[windspeed10min] = keyword[None] ):
literal[string]
identifier[w] = identifier[max] ( identifier[windspeed10min] , identifier[windspeed] )
keyword[return] literal[int] + literal[int] * identifier[t] - literal[int] *( identifier[w] ** literal[int] )+ literal[int] * identifier[t] *( identifier[w] ** literal[int] );
|
def calc_wind_chill(t, windspeed, windspeed10min=None):
"""
calculates the wind chill value based upon the temperature (F) and
wind.
returns the wind chill in degrees F.
"""
w = max(windspeed10min, windspeed)
return 35.74 + 0.6215 * t - 35.75 * w ** 0.16 + 0.4275 * t * w ** 0.16
|
def forward(self, ploc, plabel, gloc, glabel):
"""
ploc, plabel: Nx4x8732, Nxlabel_numx8732
predicted location and labels
gloc, glabel: Nx4x8732, Nx8732
ground truth location and labels
"""
mask = glabel > 0
pos_num = mask.sum(dim=1)
vec_gd = self._loc_vec(gloc)
# sum on four coordinates, and mask
sl1 = self.sl1_loss(ploc, vec_gd).sum(dim=1)
sl1 = (mask.float()*sl1).sum(dim=1)
# hard negative mining
con = self.con_loss(plabel, glabel)
# postive mask will never selected
con_neg = con.clone()
con_neg[mask] = 0
_, con_idx = con_neg.sort(dim=1, descending=True)
_, con_rank = con_idx.sort(dim=1)
# number of negative three times positive
neg_num = torch.clamp(3*pos_num, max=mask.size(1)).unsqueeze(-1)
neg_mask = con_rank < neg_num
closs = (con*(mask.float() + neg_mask.float())).sum(dim=1)
# avoid no object detected
total_loss = sl1 + closs
num_mask = (pos_num > 0).float()
pos_num = pos_num.float().clamp(min=1e-6)
ret = (total_loss*num_mask/pos_num).mean(dim=0)
return ret
|
def function[forward, parameter[self, ploc, plabel, gloc, glabel]]:
constant[
ploc, plabel: Nx4x8732, Nxlabel_numx8732
predicted location and labels
gloc, glabel: Nx4x8732, Nx8732
ground truth location and labels
]
variable[mask] assign[=] compare[name[glabel] greater[>] constant[0]]
variable[pos_num] assign[=] call[name[mask].sum, parameter[]]
variable[vec_gd] assign[=] call[name[self]._loc_vec, parameter[name[gloc]]]
variable[sl1] assign[=] call[call[name[self].sl1_loss, parameter[name[ploc], name[vec_gd]]].sum, parameter[]]
variable[sl1] assign[=] call[binary_operation[call[name[mask].float, parameter[]] * name[sl1]].sum, parameter[]]
variable[con] assign[=] call[name[self].con_loss, parameter[name[plabel], name[glabel]]]
variable[con_neg] assign[=] call[name[con].clone, parameter[]]
call[name[con_neg]][name[mask]] assign[=] constant[0]
<ast.Tuple object at 0x7da1b1b03880> assign[=] call[name[con_neg].sort, parameter[]]
<ast.Tuple object at 0x7da1b1b00a30> assign[=] call[name[con_idx].sort, parameter[]]
variable[neg_num] assign[=] call[call[name[torch].clamp, parameter[binary_operation[constant[3] * name[pos_num]]]].unsqueeze, parameter[<ast.UnaryOp object at 0x7da1b1b02d70>]]
variable[neg_mask] assign[=] compare[name[con_rank] less[<] name[neg_num]]
variable[closs] assign[=] call[binary_operation[name[con] * binary_operation[call[name[mask].float, parameter[]] + call[name[neg_mask].float, parameter[]]]].sum, parameter[]]
variable[total_loss] assign[=] binary_operation[name[sl1] + name[closs]]
variable[num_mask] assign[=] call[compare[name[pos_num] greater[>] constant[0]].float, parameter[]]
variable[pos_num] assign[=] call[call[name[pos_num].float, parameter[]].clamp, parameter[]]
variable[ret] assign[=] call[binary_operation[binary_operation[name[total_loss] * name[num_mask]] / name[pos_num]].mean, parameter[]]
return[name[ret]]
|
keyword[def] identifier[forward] ( identifier[self] , identifier[ploc] , identifier[plabel] , identifier[gloc] , identifier[glabel] ):
literal[string]
identifier[mask] = identifier[glabel] > literal[int]
identifier[pos_num] = identifier[mask] . identifier[sum] ( identifier[dim] = literal[int] )
identifier[vec_gd] = identifier[self] . identifier[_loc_vec] ( identifier[gloc] )
identifier[sl1] = identifier[self] . identifier[sl1_loss] ( identifier[ploc] , identifier[vec_gd] ). identifier[sum] ( identifier[dim] = literal[int] )
identifier[sl1] =( identifier[mask] . identifier[float] ()* identifier[sl1] ). identifier[sum] ( identifier[dim] = literal[int] )
identifier[con] = identifier[self] . identifier[con_loss] ( identifier[plabel] , identifier[glabel] )
identifier[con_neg] = identifier[con] . identifier[clone] ()
identifier[con_neg] [ identifier[mask] ]= literal[int]
identifier[_] , identifier[con_idx] = identifier[con_neg] . identifier[sort] ( identifier[dim] = literal[int] , identifier[descending] = keyword[True] )
identifier[_] , identifier[con_rank] = identifier[con_idx] . identifier[sort] ( identifier[dim] = literal[int] )
identifier[neg_num] = identifier[torch] . identifier[clamp] ( literal[int] * identifier[pos_num] , identifier[max] = identifier[mask] . identifier[size] ( literal[int] )). identifier[unsqueeze] (- literal[int] )
identifier[neg_mask] = identifier[con_rank] < identifier[neg_num]
identifier[closs] =( identifier[con] *( identifier[mask] . identifier[float] ()+ identifier[neg_mask] . identifier[float] ())). identifier[sum] ( identifier[dim] = literal[int] )
identifier[total_loss] = identifier[sl1] + identifier[closs]
identifier[num_mask] =( identifier[pos_num] > literal[int] ). identifier[float] ()
identifier[pos_num] = identifier[pos_num] . identifier[float] (). identifier[clamp] ( identifier[min] = literal[int] )
identifier[ret] =( identifier[total_loss] * identifier[num_mask] / identifier[pos_num] ). identifier[mean] ( identifier[dim] = literal[int] )
keyword[return] identifier[ret]
|
def forward(self, ploc, plabel, gloc, glabel):
"""
ploc, plabel: Nx4x8732, Nxlabel_numx8732
predicted location and labels
gloc, glabel: Nx4x8732, Nx8732
ground truth location and labels
"""
mask = glabel > 0
pos_num = mask.sum(dim=1)
vec_gd = self._loc_vec(gloc)
# sum on four coordinates, and mask
sl1 = self.sl1_loss(ploc, vec_gd).sum(dim=1)
sl1 = (mask.float() * sl1).sum(dim=1)
# hard negative mining
con = self.con_loss(plabel, glabel)
# postive mask will never selected
con_neg = con.clone()
con_neg[mask] = 0
(_, con_idx) = con_neg.sort(dim=1, descending=True)
(_, con_rank) = con_idx.sort(dim=1)
# number of negative three times positive
neg_num = torch.clamp(3 * pos_num, max=mask.size(1)).unsqueeze(-1)
neg_mask = con_rank < neg_num
closs = (con * (mask.float() + neg_mask.float())).sum(dim=1)
# avoid no object detected
total_loss = sl1 + closs
num_mask = (pos_num > 0).float()
pos_num = pos_num.float().clamp(min=1e-06)
ret = (total_loss * num_mask / pos_num).mean(dim=0)
return ret
|
def do_debug(self, args):
"""Implementation of 'coverage debug'."""
if not args:
self.help_fn("What information would you like: data, sys?")
return ERR
for info in args:
if info == 'sys':
print("-- sys ----------------------------------------")
for line in info_formatter(self.coverage.sysinfo()):
print(" %s" % line)
elif info == 'data':
print("-- data ---------------------------------------")
self.coverage.load()
print("path: %s" % self.coverage.data.filename)
print("has_arcs: %r" % self.coverage.data.has_arcs())
summary = self.coverage.data.summary(fullpath=True)
if summary:
filenames = sorted(summary.keys())
print("\n%d files:" % len(filenames))
for f in filenames:
print("%s: %d lines" % (f, summary[f]))
else:
print("No data collected")
else:
self.help_fn("Don't know what you mean by %r" % info)
return ERR
return OK
|
def function[do_debug, parameter[self, args]]:
constant[Implementation of 'coverage debug'.]
if <ast.UnaryOp object at 0x7da18ede6f80> begin[:]
call[name[self].help_fn, parameter[constant[What information would you like: data, sys?]]]
return[name[ERR]]
for taget[name[info]] in starred[name[args]] begin[:]
if compare[name[info] equal[==] constant[sys]] begin[:]
call[name[print], parameter[constant[-- sys ----------------------------------------]]]
for taget[name[line]] in starred[call[name[info_formatter], parameter[call[name[self].coverage.sysinfo, parameter[]]]]] begin[:]
call[name[print], parameter[binary_operation[constant[ %s] <ast.Mod object at 0x7da2590d6920> name[line]]]]
return[name[OK]]
|
keyword[def] identifier[do_debug] ( identifier[self] , identifier[args] ):
literal[string]
keyword[if] keyword[not] identifier[args] :
identifier[self] . identifier[help_fn] ( literal[string] )
keyword[return] identifier[ERR]
keyword[for] identifier[info] keyword[in] identifier[args] :
keyword[if] identifier[info] == literal[string] :
identifier[print] ( literal[string] )
keyword[for] identifier[line] keyword[in] identifier[info_formatter] ( identifier[self] . identifier[coverage] . identifier[sysinfo] ()):
identifier[print] ( literal[string] % identifier[line] )
keyword[elif] identifier[info] == literal[string] :
identifier[print] ( literal[string] )
identifier[self] . identifier[coverage] . identifier[load] ()
identifier[print] ( literal[string] % identifier[self] . identifier[coverage] . identifier[data] . identifier[filename] )
identifier[print] ( literal[string] % identifier[self] . identifier[coverage] . identifier[data] . identifier[has_arcs] ())
identifier[summary] = identifier[self] . identifier[coverage] . identifier[data] . identifier[summary] ( identifier[fullpath] = keyword[True] )
keyword[if] identifier[summary] :
identifier[filenames] = identifier[sorted] ( identifier[summary] . identifier[keys] ())
identifier[print] ( literal[string] % identifier[len] ( identifier[filenames] ))
keyword[for] identifier[f] keyword[in] identifier[filenames] :
identifier[print] ( literal[string] %( identifier[f] , identifier[summary] [ identifier[f] ]))
keyword[else] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[self] . identifier[help_fn] ( literal[string] % identifier[info] )
keyword[return] identifier[ERR]
keyword[return] identifier[OK]
|
def do_debug(self, args):
"""Implementation of 'coverage debug'."""
if not args:
self.help_fn('What information would you like: data, sys?')
return ERR # depends on [control=['if'], data=[]]
for info in args:
if info == 'sys':
print('-- sys ----------------------------------------')
for line in info_formatter(self.coverage.sysinfo()):
print(' %s' % line) # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=[]]
elif info == 'data':
print('-- data ---------------------------------------')
self.coverage.load()
print('path: %s' % self.coverage.data.filename)
print('has_arcs: %r' % self.coverage.data.has_arcs())
summary = self.coverage.data.summary(fullpath=True)
if summary:
filenames = sorted(summary.keys())
print('\n%d files:' % len(filenames))
for f in filenames:
print('%s: %d lines' % (f, summary[f])) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]]
else:
print('No data collected') # depends on [control=['if'], data=[]]
else:
self.help_fn("Don't know what you mean by %r" % info)
return ERR # depends on [control=['for'], data=['info']]
return OK
|
def load_rdkit_mol(self, mol):
"""
Return molecular data from :class:`rdkit.Chem.rdchem.Mol` object.
Parameters
----------
mol : :class:`rdkit.Chem.rdchem.Mol`
A molecule object from RDKit.
Returns
-------
:class:`dict`
A dictionary with ``elements`` and ``coordinates`` as keys
containing molecular data extracted from
:class:`rdkit.Chem.rdchem.Mol` object.
"""
self.system = {
'elements': np.empty(
mol.GetNumAtoms(), dtype=str),
'coordinates': np.empty((mol.GetNumAtoms(), 3))
}
for atom in mol.GetAtoms():
atom_id = atom.GetIdx()
atom_sym = atom.GetSymbol()
x, y, z = mol.GetConformer().GetAtomPosition(atom_id)
self.system['elements'][atom_id] = atom_sym
self.system['coordinates'][atom_id] = x, y, z
return self.system
|
def function[load_rdkit_mol, parameter[self, mol]]:
constant[
Return molecular data from :class:`rdkit.Chem.rdchem.Mol` object.
Parameters
----------
mol : :class:`rdkit.Chem.rdchem.Mol`
A molecule object from RDKit.
Returns
-------
:class:`dict`
A dictionary with ``elements`` and ``coordinates`` as keys
containing molecular data extracted from
:class:`rdkit.Chem.rdchem.Mol` object.
]
name[self].system assign[=] dictionary[[<ast.Constant object at 0x7da18dc9b580>, <ast.Constant object at 0x7da18dc9bc70>], [<ast.Call object at 0x7da18dc99a20>, <ast.Call object at 0x7da18dc9ae60>]]
for taget[name[atom]] in starred[call[name[mol].GetAtoms, parameter[]]] begin[:]
variable[atom_id] assign[=] call[name[atom].GetIdx, parameter[]]
variable[atom_sym] assign[=] call[name[atom].GetSymbol, parameter[]]
<ast.Tuple object at 0x7da18dc99c90> assign[=] call[call[name[mol].GetConformer, parameter[]].GetAtomPosition, parameter[name[atom_id]]]
call[call[name[self].system][constant[elements]]][name[atom_id]] assign[=] name[atom_sym]
call[call[name[self].system][constant[coordinates]]][name[atom_id]] assign[=] tuple[[<ast.Name object at 0x7da20c6e46a0>, <ast.Name object at 0x7da20c6e49d0>, <ast.Name object at 0x7da20c6e5360>]]
return[name[self].system]
|
keyword[def] identifier[load_rdkit_mol] ( identifier[self] , identifier[mol] ):
literal[string]
identifier[self] . identifier[system] ={
literal[string] : identifier[np] . identifier[empty] (
identifier[mol] . identifier[GetNumAtoms] (), identifier[dtype] = identifier[str] ),
literal[string] : identifier[np] . identifier[empty] (( identifier[mol] . identifier[GetNumAtoms] (), literal[int] ))
}
keyword[for] identifier[atom] keyword[in] identifier[mol] . identifier[GetAtoms] ():
identifier[atom_id] = identifier[atom] . identifier[GetIdx] ()
identifier[atom_sym] = identifier[atom] . identifier[GetSymbol] ()
identifier[x] , identifier[y] , identifier[z] = identifier[mol] . identifier[GetConformer] (). identifier[GetAtomPosition] ( identifier[atom_id] )
identifier[self] . identifier[system] [ literal[string] ][ identifier[atom_id] ]= identifier[atom_sym]
identifier[self] . identifier[system] [ literal[string] ][ identifier[atom_id] ]= identifier[x] , identifier[y] , identifier[z]
keyword[return] identifier[self] . identifier[system]
|
def load_rdkit_mol(self, mol):
"""
Return molecular data from :class:`rdkit.Chem.rdchem.Mol` object.
Parameters
----------
mol : :class:`rdkit.Chem.rdchem.Mol`
A molecule object from RDKit.
Returns
-------
:class:`dict`
A dictionary with ``elements`` and ``coordinates`` as keys
containing molecular data extracted from
:class:`rdkit.Chem.rdchem.Mol` object.
"""
self.system = {'elements': np.empty(mol.GetNumAtoms(), dtype=str), 'coordinates': np.empty((mol.GetNumAtoms(), 3))}
for atom in mol.GetAtoms():
atom_id = atom.GetIdx()
atom_sym = atom.GetSymbol()
(x, y, z) = mol.GetConformer().GetAtomPosition(atom_id)
self.system['elements'][atom_id] = atom_sym
self.system['coordinates'][atom_id] = (x, y, z) # depends on [control=['for'], data=['atom']]
return self.system
|
def update(self):
"""Update CPU stats using the input method."""
# Grab stats into self.stats
if self.input_method == 'local':
stats = self.update_local()
elif self.input_method == 'snmp':
stats = self.update_snmp()
else:
stats = self.get_init_value()
# Update the stats
self.stats = stats
return self.stats
|
def function[update, parameter[self]]:
constant[Update CPU stats using the input method.]
if compare[name[self].input_method equal[==] constant[local]] begin[:]
variable[stats] assign[=] call[name[self].update_local, parameter[]]
name[self].stats assign[=] name[stats]
return[name[self].stats]
|
keyword[def] identifier[update] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[input_method] == literal[string] :
identifier[stats] = identifier[self] . identifier[update_local] ()
keyword[elif] identifier[self] . identifier[input_method] == literal[string] :
identifier[stats] = identifier[self] . identifier[update_snmp] ()
keyword[else] :
identifier[stats] = identifier[self] . identifier[get_init_value] ()
identifier[self] . identifier[stats] = identifier[stats]
keyword[return] identifier[self] . identifier[stats]
|
def update(self):
"""Update CPU stats using the input method."""
# Grab stats into self.stats
if self.input_method == 'local':
stats = self.update_local() # depends on [control=['if'], data=[]]
elif self.input_method == 'snmp':
stats = self.update_snmp() # depends on [control=['if'], data=[]]
else:
stats = self.get_init_value()
# Update the stats
self.stats = stats
return self.stats
|
def _updateKW(image, filename, exten, skyKW, Value):
"""update the header with the kw,value"""
# Update the value in memory
image.header[skyKW] = Value
# Now update the value on disk
if isinstance(exten,tuple):
strexten = '[%s,%s]'%(exten[0],str(exten[1]))
else:
strexten = '[%s]'%(exten)
log.info('Updating keyword %s in %s' % (skyKW, filename + strexten))
fobj = fileutil.openImage(filename, mode='update', memmap=False)
fobj[exten].header[skyKW] = (Value, 'Sky value computed by AstroDrizzle')
fobj.close()
|
def function[_updateKW, parameter[image, filename, exten, skyKW, Value]]:
constant[update the header with the kw,value]
call[name[image].header][name[skyKW]] assign[=] name[Value]
if call[name[isinstance], parameter[name[exten], name[tuple]]] begin[:]
variable[strexten] assign[=] binary_operation[constant[[%s,%s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1b9cc40>, <ast.Call object at 0x7da1b1b9cb20>]]]
call[name[log].info, parameter[binary_operation[constant[Updating keyword %s in %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b9c820>, <ast.BinOp object at 0x7da1b1b9c8b0>]]]]]
variable[fobj] assign[=] call[name[fileutil].openImage, parameter[name[filename]]]
call[call[name[fobj]][name[exten]].header][name[skyKW]] assign[=] tuple[[<ast.Name object at 0x7da1b1b9ded0>, <ast.Constant object at 0x7da1b1b9de70>]]
call[name[fobj].close, parameter[]]
|
keyword[def] identifier[_updateKW] ( identifier[image] , identifier[filename] , identifier[exten] , identifier[skyKW] , identifier[Value] ):
literal[string]
identifier[image] . identifier[header] [ identifier[skyKW] ]= identifier[Value]
keyword[if] identifier[isinstance] ( identifier[exten] , identifier[tuple] ):
identifier[strexten] = literal[string] %( identifier[exten] [ literal[int] ], identifier[str] ( identifier[exten] [ literal[int] ]))
keyword[else] :
identifier[strexten] = literal[string] %( identifier[exten] )
identifier[log] . identifier[info] ( literal[string] %( identifier[skyKW] , identifier[filename] + identifier[strexten] ))
identifier[fobj] = identifier[fileutil] . identifier[openImage] ( identifier[filename] , identifier[mode] = literal[string] , identifier[memmap] = keyword[False] )
identifier[fobj] [ identifier[exten] ]. identifier[header] [ identifier[skyKW] ]=( identifier[Value] , literal[string] )
identifier[fobj] . identifier[close] ()
|
def _updateKW(image, filename, exten, skyKW, Value):
"""update the header with the kw,value"""
# Update the value in memory
image.header[skyKW] = Value
# Now update the value on disk
if isinstance(exten, tuple):
strexten = '[%s,%s]' % (exten[0], str(exten[1])) # depends on [control=['if'], data=[]]
else:
strexten = '[%s]' % exten
log.info('Updating keyword %s in %s' % (skyKW, filename + strexten))
fobj = fileutil.openImage(filename, mode='update', memmap=False)
fobj[exten].header[skyKW] = (Value, 'Sky value computed by AstroDrizzle')
fobj.close()
|
def CreateServiceProto(job):
"""Create the Service protobuf.
Args:
job: Launchdjobdict from servicemanagement framework.
Returns:
sysinfo_pb2.OSXServiceInformation proto
"""
service = rdf_client.OSXServiceInformation(
label=job.get("Label"),
program=job.get("Program"),
sessiontype=job.get("LimitLoadToSessionType"),
lastexitstatus=int(job["LastExitStatus"]),
timeout=int(job["TimeOut"]),
ondemand=bool(job["OnDemand"]))
for arg in job.get("ProgramArguments", "", stringify=False):
# Returns CFArray of CFStrings
service.args.Append(str(arg))
mach_dict = job.get("MachServices", {}, stringify=False)
for key, value in iteritems(mach_dict):
service.machservice.Append("%s:%s" % (key, value))
job_mach_dict = job.get("PerJobMachServices", {}, stringify=False)
for key, value in iteritems(job_mach_dict):
service.perjobmachservice.Append("%s:%s" % (key, value))
if "PID" in job:
service.pid = job["PID"].value
return service
|
def function[CreateServiceProto, parameter[job]]:
constant[Create the Service protobuf.
Args:
job: Launchdjobdict from servicemanagement framework.
Returns:
sysinfo_pb2.OSXServiceInformation proto
]
variable[service] assign[=] call[name[rdf_client].OSXServiceInformation, parameter[]]
for taget[name[arg]] in starred[call[name[job].get, parameter[constant[ProgramArguments], constant[]]]] begin[:]
call[name[service].args.Append, parameter[call[name[str], parameter[name[arg]]]]]
variable[mach_dict] assign[=] call[name[job].get, parameter[constant[MachServices], dictionary[[], []]]]
for taget[tuple[[<ast.Name object at 0x7da1b1c0cd30>, <ast.Name object at 0x7da1b1c0e080>]]] in starred[call[name[iteritems], parameter[name[mach_dict]]]] begin[:]
call[name[service].machservice.Append, parameter[binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1c0d3c0>, <ast.Name object at 0x7da1b1c0dd80>]]]]]
variable[job_mach_dict] assign[=] call[name[job].get, parameter[constant[PerJobMachServices], dictionary[[], []]]]
for taget[tuple[[<ast.Name object at 0x7da1b1c0ed40>, <ast.Name object at 0x7da1b1c0e6e0>]]] in starred[call[name[iteritems], parameter[name[job_mach_dict]]]] begin[:]
call[name[service].perjobmachservice.Append, parameter[binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1c0f490>, <ast.Name object at 0x7da1b1c0cc70>]]]]]
if compare[constant[PID] in name[job]] begin[:]
name[service].pid assign[=] call[name[job]][constant[PID]].value
return[name[service]]
|
keyword[def] identifier[CreateServiceProto] ( identifier[job] ):
literal[string]
identifier[service] = identifier[rdf_client] . identifier[OSXServiceInformation] (
identifier[label] = identifier[job] . identifier[get] ( literal[string] ),
identifier[program] = identifier[job] . identifier[get] ( literal[string] ),
identifier[sessiontype] = identifier[job] . identifier[get] ( literal[string] ),
identifier[lastexitstatus] = identifier[int] ( identifier[job] [ literal[string] ]),
identifier[timeout] = identifier[int] ( identifier[job] [ literal[string] ]),
identifier[ondemand] = identifier[bool] ( identifier[job] [ literal[string] ]))
keyword[for] identifier[arg] keyword[in] identifier[job] . identifier[get] ( literal[string] , literal[string] , identifier[stringify] = keyword[False] ):
identifier[service] . identifier[args] . identifier[Append] ( identifier[str] ( identifier[arg] ))
identifier[mach_dict] = identifier[job] . identifier[get] ( literal[string] ,{}, identifier[stringify] = keyword[False] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[mach_dict] ):
identifier[service] . identifier[machservice] . identifier[Append] ( literal[string] %( identifier[key] , identifier[value] ))
identifier[job_mach_dict] = identifier[job] . identifier[get] ( literal[string] ,{}, identifier[stringify] = keyword[False] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[job_mach_dict] ):
identifier[service] . identifier[perjobmachservice] . identifier[Append] ( literal[string] %( identifier[key] , identifier[value] ))
keyword[if] literal[string] keyword[in] identifier[job] :
identifier[service] . identifier[pid] = identifier[job] [ literal[string] ]. identifier[value]
keyword[return] identifier[service]
|
def CreateServiceProto(job):
"""Create the Service protobuf.
Args:
job: Launchdjobdict from servicemanagement framework.
Returns:
sysinfo_pb2.OSXServiceInformation proto
"""
service = rdf_client.OSXServiceInformation(label=job.get('Label'), program=job.get('Program'), sessiontype=job.get('LimitLoadToSessionType'), lastexitstatus=int(job['LastExitStatus']), timeout=int(job['TimeOut']), ondemand=bool(job['OnDemand']))
for arg in job.get('ProgramArguments', '', stringify=False):
# Returns CFArray of CFStrings
service.args.Append(str(arg)) # depends on [control=['for'], data=['arg']]
mach_dict = job.get('MachServices', {}, stringify=False)
for (key, value) in iteritems(mach_dict):
service.machservice.Append('%s:%s' % (key, value)) # depends on [control=['for'], data=[]]
job_mach_dict = job.get('PerJobMachServices', {}, stringify=False)
for (key, value) in iteritems(job_mach_dict):
service.perjobmachservice.Append('%s:%s' % (key, value)) # depends on [control=['for'], data=[]]
if 'PID' in job:
service.pid = job['PID'].value # depends on [control=['if'], data=['job']]
return service
|
def get_file_info_by_id(self, id):
"""
Given an id, get the corresponding file info as the following:\n
(relative path joined with file name, file info dict)
Parameters:
#. id (string): The file unique id string.
:Returns:
#. relativePath (string): The file relative path joined with file name.
If None, it means file was not found.
#. info (None, dictionary): The file information dictionary.
If None, it means file was not found.
"""
for path, info in self.walk_files_info():
if info['id']==id:
return path, info
# none was found
return None, None
|
def function[get_file_info_by_id, parameter[self, id]]:
constant[
Given an id, get the corresponding file info as the following:
(relative path joined with file name, file info dict)
Parameters:
#. id (string): The file unique id string.
:Returns:
#. relativePath (string): The file relative path joined with file name.
If None, it means file was not found.
#. info (None, dictionary): The file information dictionary.
If None, it means file was not found.
]
for taget[tuple[[<ast.Name object at 0x7da2044c3880>, <ast.Name object at 0x7da2044c03a0>]]] in starred[call[name[self].walk_files_info, parameter[]]] begin[:]
if compare[call[name[info]][constant[id]] equal[==] name[id]] begin[:]
return[tuple[[<ast.Name object at 0x7da204564b20>, <ast.Name object at 0x7da204566f20>]]]
return[tuple[[<ast.Constant object at 0x7da204565570>, <ast.Constant object at 0x7da2045659c0>]]]
|
keyword[def] identifier[get_file_info_by_id] ( identifier[self] , identifier[id] ):
literal[string]
keyword[for] identifier[path] , identifier[info] keyword[in] identifier[self] . identifier[walk_files_info] ():
keyword[if] identifier[info] [ literal[string] ]== identifier[id] :
keyword[return] identifier[path] , identifier[info]
keyword[return] keyword[None] , keyword[None]
|
def get_file_info_by_id(self, id):
"""
Given an id, get the corresponding file info as the following:
(relative path joined with file name, file info dict)
Parameters:
#. id (string): The file unique id string.
:Returns:
#. relativePath (string): The file relative path joined with file name.
If None, it means file was not found.
#. info (None, dictionary): The file information dictionary.
If None, it means file was not found.
"""
for (path, info) in self.walk_files_info():
if info['id'] == id:
return (path, info) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# none was found
return (None, None)
|
def configure(self):
"""
Applies one-time settings changes to the host, usually to initialize the service.
"""
print('env.services:', self.genv.services)
for service in list(self.genv.services):
service = service.strip().upper()
funcs = common.service_configurators.get(service, [])
if funcs:
print('!'*80)
print('Configuring service %s...' % (service,))
for func in funcs:
print('Function:', func)
if not self.dryrun:
func()
|
def function[configure, parameter[self]]:
constant[
Applies one-time settings changes to the host, usually to initialize the service.
]
call[name[print], parameter[constant[env.services:], name[self].genv.services]]
for taget[name[service]] in starred[call[name[list], parameter[name[self].genv.services]]] begin[:]
variable[service] assign[=] call[call[name[service].strip, parameter[]].upper, parameter[]]
variable[funcs] assign[=] call[name[common].service_configurators.get, parameter[name[service], list[[]]]]
if name[funcs] begin[:]
call[name[print], parameter[binary_operation[constant[!] * constant[80]]]]
call[name[print], parameter[binary_operation[constant[Configuring service %s...] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0049c30>]]]]]
for taget[name[func]] in starred[name[funcs]] begin[:]
call[name[print], parameter[constant[Function:], name[func]]]
if <ast.UnaryOp object at 0x7da1b0049000> begin[:]
call[name[func], parameter[]]
|
keyword[def] identifier[configure] ( identifier[self] ):
literal[string]
identifier[print] ( literal[string] , identifier[self] . identifier[genv] . identifier[services] )
keyword[for] identifier[service] keyword[in] identifier[list] ( identifier[self] . identifier[genv] . identifier[services] ):
identifier[service] = identifier[service] . identifier[strip] (). identifier[upper] ()
identifier[funcs] = identifier[common] . identifier[service_configurators] . identifier[get] ( identifier[service] ,[])
keyword[if] identifier[funcs] :
identifier[print] ( literal[string] * literal[int] )
identifier[print] ( literal[string] %( identifier[service] ,))
keyword[for] identifier[func] keyword[in] identifier[funcs] :
identifier[print] ( literal[string] , identifier[func] )
keyword[if] keyword[not] identifier[self] . identifier[dryrun] :
identifier[func] ()
|
def configure(self):
"""
Applies one-time settings changes to the host, usually to initialize the service.
"""
print('env.services:', self.genv.services)
for service in list(self.genv.services):
service = service.strip().upper()
funcs = common.service_configurators.get(service, [])
if funcs:
print('!' * 80)
print('Configuring service %s...' % (service,))
for func in funcs:
print('Function:', func)
if not self.dryrun:
func() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['func']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['service']]
|
def get_work_unit_status(self, work_spec_name, work_unit_key):
'''Get a high-level status for some work unit.
The return value is a dictionary. The only required key is
``status``, which could be any of:
``missing``
The work unit does not exist anywhere
``available``
The work unit is available for new workers; additional
keys include ``expiration`` (may be 0)
``pending``
The work unit is being worked on; additional keys include
``expiration`` and ``worker_id`` (usually)
``blocked``
The work unit is waiting for some other work units to finish;
additional keys include ``depends_on``
``finished``
The work unit has completed
``failed``
The work unit failed; additional keys include ``traceback``
:param str work_spec_name: name of the work spec
:param str work_unit_name: name of the work unit
:return: dictionary description of summary status
'''
with self.registry.lock(identifier=self.worker_id) as session:
# In the available list?
(unit,priority) = session.get(WORK_UNITS_ + work_spec_name,
work_unit_key, include_priority=True)
if unit:
result = {}
if priority < time.time():
result['status'] = 'available'
else:
result['status'] = 'pending'
result['expiration'] = priority
# ...is anyone working on it?
worker = session.get(WORK_UNITS_ + work_spec_name + "_locks",
work_unit_key)
if worker:
result['worker_id'] = worker
return result
# In the finished list?
unit = session.get(WORK_UNITS_ + work_spec_name + _FINISHED,
work_unit_key)
if unit:
return { 'status': 'finished' }
# In the failed list?
unit = session.get(WORK_UNITS_ + work_spec_name + _FAILED,
work_unit_key)
if unit:
result = { 'status': 'failed' }
if 'traceback' in unit:
result['traceback'] = unit['traceback']
return result
# In the blocked list?
unit = session.get(WORK_UNITS_ + work_spec_name + _BLOCKED,
work_unit_key)
if unit:
# This should always have *something*, right?
deps = session.get(WORK_UNITS_ + work_spec_name + _DEPENDS,
work_unit_key, default=[])
result = { 'status': 'blocked',
'depends_on': deps }
return result
return { 'status': 'missing' }
|
def function[get_work_unit_status, parameter[self, work_spec_name, work_unit_key]]:
constant[Get a high-level status for some work unit.
The return value is a dictionary. The only required key is
``status``, which could be any of:
``missing``
The work unit does not exist anywhere
``available``
The work unit is available for new workers; additional
keys include ``expiration`` (may be 0)
``pending``
The work unit is being worked on; additional keys include
``expiration`` and ``worker_id`` (usually)
``blocked``
The work unit is waiting for some other work units to finish;
additional keys include ``depends_on``
``finished``
The work unit has completed
``failed``
The work unit failed; additional keys include ``traceback``
:param str work_spec_name: name of the work spec
:param str work_unit_name: name of the work unit
:return: dictionary description of summary status
]
with call[name[self].registry.lock, parameter[]] begin[:]
<ast.Tuple object at 0x7da1b146dd50> assign[=] call[name[session].get, parameter[binary_operation[name[WORK_UNITS_] + name[work_spec_name]], name[work_unit_key]]]
if name[unit] begin[:]
variable[result] assign[=] dictionary[[], []]
if compare[name[priority] less[<] call[name[time].time, parameter[]]] begin[:]
call[name[result]][constant[status]] assign[=] constant[available]
call[name[result]][constant[expiration]] assign[=] name[priority]
variable[worker] assign[=] call[name[session].get, parameter[binary_operation[binary_operation[name[WORK_UNITS_] + name[work_spec_name]] + constant[_locks]], name[work_unit_key]]]
if name[worker] begin[:]
call[name[result]][constant[worker_id]] assign[=] name[worker]
return[name[result]]
variable[unit] assign[=] call[name[session].get, parameter[binary_operation[binary_operation[name[WORK_UNITS_] + name[work_spec_name]] + name[_FINISHED]], name[work_unit_key]]]
if name[unit] begin[:]
return[dictionary[[<ast.Constant object at 0x7da1b146d090>], [<ast.Constant object at 0x7da1b146f610>]]]
variable[unit] assign[=] call[name[session].get, parameter[binary_operation[binary_operation[name[WORK_UNITS_] + name[work_spec_name]] + name[_FAILED]], name[work_unit_key]]]
if name[unit] begin[:]
variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da1b146c2e0>], [<ast.Constant object at 0x7da1b146fa30>]]
if compare[constant[traceback] in name[unit]] begin[:]
call[name[result]][constant[traceback]] assign[=] call[name[unit]][constant[traceback]]
return[name[result]]
variable[unit] assign[=] call[name[session].get, parameter[binary_operation[binary_operation[name[WORK_UNITS_] + name[work_spec_name]] + name[_BLOCKED]], name[work_unit_key]]]
if name[unit] begin[:]
variable[deps] assign[=] call[name[session].get, parameter[binary_operation[binary_operation[name[WORK_UNITS_] + name[work_spec_name]] + name[_DEPENDS]], name[work_unit_key]]]
variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da1b146e560>, <ast.Constant object at 0x7da1b146e5f0>], [<ast.Constant object at 0x7da1b146f7f0>, <ast.Name object at 0x7da1b146f970>]]
return[name[result]]
return[dictionary[[<ast.Constant object at 0x7da1b14624d0>], [<ast.Constant object at 0x7da1b1462350>]]]
|
keyword[def] identifier[get_work_unit_status] ( identifier[self] , identifier[work_spec_name] , identifier[work_unit_key] ):
literal[string]
keyword[with] identifier[self] . identifier[registry] . identifier[lock] ( identifier[identifier] = identifier[self] . identifier[worker_id] ) keyword[as] identifier[session] :
( identifier[unit] , identifier[priority] )= identifier[session] . identifier[get] ( identifier[WORK_UNITS_] + identifier[work_spec_name] ,
identifier[work_unit_key] , identifier[include_priority] = keyword[True] )
keyword[if] identifier[unit] :
identifier[result] ={}
keyword[if] identifier[priority] < identifier[time] . identifier[time] ():
identifier[result] [ literal[string] ]= literal[string]
keyword[else] :
identifier[result] [ literal[string] ]= literal[string]
identifier[result] [ literal[string] ]= identifier[priority]
identifier[worker] = identifier[session] . identifier[get] ( identifier[WORK_UNITS_] + identifier[work_spec_name] + literal[string] ,
identifier[work_unit_key] )
keyword[if] identifier[worker] :
identifier[result] [ literal[string] ]= identifier[worker]
keyword[return] identifier[result]
identifier[unit] = identifier[session] . identifier[get] ( identifier[WORK_UNITS_] + identifier[work_spec_name] + identifier[_FINISHED] ,
identifier[work_unit_key] )
keyword[if] identifier[unit] :
keyword[return] { literal[string] : literal[string] }
identifier[unit] = identifier[session] . identifier[get] ( identifier[WORK_UNITS_] + identifier[work_spec_name] + identifier[_FAILED] ,
identifier[work_unit_key] )
keyword[if] identifier[unit] :
identifier[result] ={ literal[string] : literal[string] }
keyword[if] literal[string] keyword[in] identifier[unit] :
identifier[result] [ literal[string] ]= identifier[unit] [ literal[string] ]
keyword[return] identifier[result]
identifier[unit] = identifier[session] . identifier[get] ( identifier[WORK_UNITS_] + identifier[work_spec_name] + identifier[_BLOCKED] ,
identifier[work_unit_key] )
keyword[if] identifier[unit] :
identifier[deps] = identifier[session] . identifier[get] ( identifier[WORK_UNITS_] + identifier[work_spec_name] + identifier[_DEPENDS] ,
identifier[work_unit_key] , identifier[default] =[])
identifier[result] ={ literal[string] : literal[string] ,
literal[string] : identifier[deps] }
keyword[return] identifier[result]
keyword[return] { literal[string] : literal[string] }
|
def get_work_unit_status(self, work_spec_name, work_unit_key):
"""Get a high-level status for some work unit.
The return value is a dictionary. The only required key is
``status``, which could be any of:
``missing``
The work unit does not exist anywhere
``available``
The work unit is available for new workers; additional
keys include ``expiration`` (may be 0)
``pending``
The work unit is being worked on; additional keys include
``expiration`` and ``worker_id`` (usually)
``blocked``
The work unit is waiting for some other work units to finish;
additional keys include ``depends_on``
``finished``
The work unit has completed
``failed``
The work unit failed; additional keys include ``traceback``
:param str work_spec_name: name of the work spec
:param str work_unit_name: name of the work unit
:return: dictionary description of summary status
"""
with self.registry.lock(identifier=self.worker_id) as session:
# In the available list?
(unit, priority) = session.get(WORK_UNITS_ + work_spec_name, work_unit_key, include_priority=True)
if unit:
result = {}
if priority < time.time():
result['status'] = 'available' # depends on [control=['if'], data=[]]
else:
result['status'] = 'pending'
result['expiration'] = priority
# ...is anyone working on it?
worker = session.get(WORK_UNITS_ + work_spec_name + '_locks', work_unit_key)
if worker:
result['worker_id'] = worker # depends on [control=['if'], data=[]]
return result # depends on [control=['if'], data=[]]
# In the finished list?
unit = session.get(WORK_UNITS_ + work_spec_name + _FINISHED, work_unit_key)
if unit:
return {'status': 'finished'} # depends on [control=['if'], data=[]]
# In the failed list?
unit = session.get(WORK_UNITS_ + work_spec_name + _FAILED, work_unit_key)
if unit:
result = {'status': 'failed'}
if 'traceback' in unit:
result['traceback'] = unit['traceback'] # depends on [control=['if'], data=['unit']]
return result # depends on [control=['if'], data=[]]
# In the blocked list?
unit = session.get(WORK_UNITS_ + work_spec_name + _BLOCKED, work_unit_key)
if unit:
# This should always have *something*, right?
deps = session.get(WORK_UNITS_ + work_spec_name + _DEPENDS, work_unit_key, default=[])
result = {'status': 'blocked', 'depends_on': deps}
return result # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['session']]
return {'status': 'missing'}
|
def setStop(self, vehID, edgeID, pos=1., laneIndex=0, duration=2**31 - 1,
flags=tc.STOP_DEFAULT, startPos=tc.INVALID_DOUBLE_VALUE, until=-1):
"""setStop(string, string, double, integer, integer, integer, double, integer) -> None
Adds or modifies a stop with the given parameters. The duration and the until attribute are
in milliseconds.
"""
self._connection._beginMessage(tc.CMD_SET_VEHICLE_VARIABLE, tc.CMD_STOP,
vehID, 1 + 4 + 1 + 4 + len(edgeID) + 1 + 8 + 1 + 1 + 1 + 4 + 1 + 1 + 1 + 8 + 1 + 4)
self._connection._string += struct.pack("!Bi", tc.TYPE_COMPOUND, 7)
self._connection._packString(edgeID)
self._connection._string += struct.pack("!BdBBBiBB", tc.TYPE_DOUBLE, pos,
tc.TYPE_BYTE, laneIndex, tc.TYPE_INTEGER, duration, tc.TYPE_BYTE, flags)
self._connection._string += struct.pack("!BdBi",
tc.TYPE_DOUBLE, startPos, tc.TYPE_INTEGER, until)
self._connection._sendExact()
|
def function[setStop, parameter[self, vehID, edgeID, pos, laneIndex, duration, flags, startPos, until]]:
constant[setStop(string, string, double, integer, integer, integer, double, integer) -> None
Adds or modifies a stop with the given parameters. The duration and the until attribute are
in milliseconds.
]
call[name[self]._connection._beginMessage, parameter[name[tc].CMD_SET_VEHICLE_VARIABLE, name[tc].CMD_STOP, name[vehID], binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[1] + constant[4]] + constant[1]] + constant[4]] + call[name[len], parameter[name[edgeID]]]] + constant[1]] + constant[8]] + constant[1]] + constant[1]] + constant[1]] + constant[4]] + constant[1]] + constant[1]] + constant[1]] + constant[8]] + constant[1]] + constant[4]]]]
<ast.AugAssign object at 0x7da1b0910e50>
call[name[self]._connection._packString, parameter[name[edgeID]]]
<ast.AugAssign object at 0x7da1b0912a70>
<ast.AugAssign object at 0x7da1b09120e0>
call[name[self]._connection._sendExact, parameter[]]
|
keyword[def] identifier[setStop] ( identifier[self] , identifier[vehID] , identifier[edgeID] , identifier[pos] = literal[int] , identifier[laneIndex] = literal[int] , identifier[duration] = literal[int] ** literal[int] - literal[int] ,
identifier[flags] = identifier[tc] . identifier[STOP_DEFAULT] , identifier[startPos] = identifier[tc] . identifier[INVALID_DOUBLE_VALUE] , identifier[until] =- literal[int] ):
literal[string]
identifier[self] . identifier[_connection] . identifier[_beginMessage] ( identifier[tc] . identifier[CMD_SET_VEHICLE_VARIABLE] , identifier[tc] . identifier[CMD_STOP] ,
identifier[vehID] , literal[int] + literal[int] + literal[int] + literal[int] + identifier[len] ( identifier[edgeID] )+ literal[int] + literal[int] + literal[int] + literal[int] + literal[int] + literal[int] + literal[int] + literal[int] + literal[int] + literal[int] + literal[int] + literal[int] )
identifier[self] . identifier[_connection] . identifier[_string] += identifier[struct] . identifier[pack] ( literal[string] , identifier[tc] . identifier[TYPE_COMPOUND] , literal[int] )
identifier[self] . identifier[_connection] . identifier[_packString] ( identifier[edgeID] )
identifier[self] . identifier[_connection] . identifier[_string] += identifier[struct] . identifier[pack] ( literal[string] , identifier[tc] . identifier[TYPE_DOUBLE] , identifier[pos] ,
identifier[tc] . identifier[TYPE_BYTE] , identifier[laneIndex] , identifier[tc] . identifier[TYPE_INTEGER] , identifier[duration] , identifier[tc] . identifier[TYPE_BYTE] , identifier[flags] )
identifier[self] . identifier[_connection] . identifier[_string] += identifier[struct] . identifier[pack] ( literal[string] ,
identifier[tc] . identifier[TYPE_DOUBLE] , identifier[startPos] , identifier[tc] . identifier[TYPE_INTEGER] , identifier[until] )
identifier[self] . identifier[_connection] . identifier[_sendExact] ()
|
def setStop(self, vehID, edgeID, pos=1.0, laneIndex=0, duration=2 ** 31 - 1, flags=tc.STOP_DEFAULT, startPos=tc.INVALID_DOUBLE_VALUE, until=-1):
"""setStop(string, string, double, integer, integer, integer, double, integer) -> None
Adds or modifies a stop with the given parameters. The duration and the until attribute are
in milliseconds.
"""
self._connection._beginMessage(tc.CMD_SET_VEHICLE_VARIABLE, tc.CMD_STOP, vehID, 1 + 4 + 1 + 4 + len(edgeID) + 1 + 8 + 1 + 1 + 1 + 4 + 1 + 1 + 1 + 8 + 1 + 4)
self._connection._string += struct.pack('!Bi', tc.TYPE_COMPOUND, 7)
self._connection._packString(edgeID)
self._connection._string += struct.pack('!BdBBBiBB', tc.TYPE_DOUBLE, pos, tc.TYPE_BYTE, laneIndex, tc.TYPE_INTEGER, duration, tc.TYPE_BYTE, flags)
self._connection._string += struct.pack('!BdBi', tc.TYPE_DOUBLE, startPos, tc.TYPE_INTEGER, until)
self._connection._sendExact()
|
def beforeRender(self, ctx):
"""
Call the C{beforeRender} implementations on L{MantissaLivePage} and
L{_FragmentWrapperMixin}.
"""
MantissaLivePage.beforeRender(self, ctx)
return _FragmentWrapperMixin.beforeRender(self, ctx)
|
def function[beforeRender, parameter[self, ctx]]:
constant[
Call the C{beforeRender} implementations on L{MantissaLivePage} and
L{_FragmentWrapperMixin}.
]
call[name[MantissaLivePage].beforeRender, parameter[name[self], name[ctx]]]
return[call[name[_FragmentWrapperMixin].beforeRender, parameter[name[self], name[ctx]]]]
|
keyword[def] identifier[beforeRender] ( identifier[self] , identifier[ctx] ):
literal[string]
identifier[MantissaLivePage] . identifier[beforeRender] ( identifier[self] , identifier[ctx] )
keyword[return] identifier[_FragmentWrapperMixin] . identifier[beforeRender] ( identifier[self] , identifier[ctx] )
|
def beforeRender(self, ctx):
"""
Call the C{beforeRender} implementations on L{MantissaLivePage} and
L{_FragmentWrapperMixin}.
"""
MantissaLivePage.beforeRender(self, ctx)
return _FragmentWrapperMixin.beforeRender(self, ctx)
|
def gen_fibonacci():
"""Yield the next Fibonacci number.
Based on https://www.python-course.eu/generators.php
Starts at Fibonacci number 3 (the second 1)
Yields
------
int
The next Fibonacci number
"""
num_a, num_b = 1, 2
while True:
yield num_a
num_a, num_b = num_b, num_a + num_b
|
def function[gen_fibonacci, parameter[]]:
constant[Yield the next Fibonacci number.
Based on https://www.python-course.eu/generators.php
Starts at Fibonacci number 3 (the second 1)
Yields
------
int
The next Fibonacci number
]
<ast.Tuple object at 0x7da1b0150430> assign[=] tuple[[<ast.Constant object at 0x7da1b0152ad0>, <ast.Constant object at 0x7da1b0150040>]]
while constant[True] begin[:]
<ast.Yield object at 0x7da1b0150100>
<ast.Tuple object at 0x7da1b01503a0> assign[=] tuple[[<ast.Name object at 0x7da1b0153820>, <ast.BinOp object at 0x7da1b0151990>]]
|
keyword[def] identifier[gen_fibonacci] ():
literal[string]
identifier[num_a] , identifier[num_b] = literal[int] , literal[int]
keyword[while] keyword[True] :
keyword[yield] identifier[num_a]
identifier[num_a] , identifier[num_b] = identifier[num_b] , identifier[num_a] + identifier[num_b]
|
def gen_fibonacci():
"""Yield the next Fibonacci number.
Based on https://www.python-course.eu/generators.php
Starts at Fibonacci number 3 (the second 1)
Yields
------
int
The next Fibonacci number
"""
(num_a, num_b) = (1, 2)
while True:
yield num_a
(num_a, num_b) = (num_b, num_a + num_b) # depends on [control=['while'], data=[]]
|
def from_rep(u):
"""Given a string, return a UUID object."""
if isinstance(u, pyversion.string_types):
return uuid.UUID(u)
# hack to remove signs
a = ctypes.c_ulong(u[0])
b = ctypes.c_ulong(u[1])
combined = a.value << 64 | b.value
return uuid.UUID(int=combined)
|
def function[from_rep, parameter[u]]:
constant[Given a string, return a UUID object.]
if call[name[isinstance], parameter[name[u], name[pyversion].string_types]] begin[:]
return[call[name[uuid].UUID, parameter[name[u]]]]
variable[a] assign[=] call[name[ctypes].c_ulong, parameter[call[name[u]][constant[0]]]]
variable[b] assign[=] call[name[ctypes].c_ulong, parameter[call[name[u]][constant[1]]]]
variable[combined] assign[=] binary_operation[binary_operation[name[a].value <ast.LShift object at 0x7da2590d69e0> constant[64]] <ast.BitOr object at 0x7da2590d6aa0> name[b].value]
return[call[name[uuid].UUID, parameter[]]]
|
keyword[def] identifier[from_rep] ( identifier[u] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[u] , identifier[pyversion] . identifier[string_types] ):
keyword[return] identifier[uuid] . identifier[UUID] ( identifier[u] )
identifier[a] = identifier[ctypes] . identifier[c_ulong] ( identifier[u] [ literal[int] ])
identifier[b] = identifier[ctypes] . identifier[c_ulong] ( identifier[u] [ literal[int] ])
identifier[combined] = identifier[a] . identifier[value] << literal[int] | identifier[b] . identifier[value]
keyword[return] identifier[uuid] . identifier[UUID] ( identifier[int] = identifier[combined] )
|
def from_rep(u):
"""Given a string, return a UUID object."""
if isinstance(u, pyversion.string_types):
return uuid.UUID(u) # depends on [control=['if'], data=[]]
# hack to remove signs
a = ctypes.c_ulong(u[0])
b = ctypes.c_ulong(u[1])
combined = a.value << 64 | b.value
return uuid.UUID(int=combined)
|
def get_alias_func(self):
"""The get_alias function is set by a few different things:
- if there is a 'generate_alias_name' macro in the root project,
it will be used.
- if that does not exist but there is a 'generate_alias_name'
macro in the 'dbt' internal project, that will be used
- if neither of those exist (unit tests?), a function that returns
the 'default alias' as set in the model's filename or alias
configuration.
"""
if self._get_alias_func is not None:
return self._get_alias_func
get_alias_macro = self.macro_manifest.find_macro_by_name(
'generate_alias_name',
self.root_project_config.project_name
)
if get_alias_macro is None:
get_alias_macro = self.macro_manifest.find_macro_by_name(
'generate_alias_name',
GLOBAL_PROJECT_NAME
)
if get_alias_macro is None:
def get_alias(node, custom_alias_name=None):
if custom_alias_name is None:
return node.name
else:
return custom_alias_name
else:
root_context = dbt.context.parser.generate_macro(
get_alias_macro, self.root_project_config,
self.macro_manifest
)
get_alias = get_alias_macro.generator(root_context)
self._get_alias_func = get_alias
return self._get_alias_func
|
def function[get_alias_func, parameter[self]]:
constant[The get_alias function is set by a few different things:
- if there is a 'generate_alias_name' macro in the root project,
it will be used.
- if that does not exist but there is a 'generate_alias_name'
macro in the 'dbt' internal project, that will be used
- if neither of those exist (unit tests?), a function that returns
the 'default alias' as set in the model's filename or alias
configuration.
]
if compare[name[self]._get_alias_func is_not constant[None]] begin[:]
return[name[self]._get_alias_func]
variable[get_alias_macro] assign[=] call[name[self].macro_manifest.find_macro_by_name, parameter[constant[generate_alias_name], name[self].root_project_config.project_name]]
if compare[name[get_alias_macro] is constant[None]] begin[:]
variable[get_alias_macro] assign[=] call[name[self].macro_manifest.find_macro_by_name, parameter[constant[generate_alias_name], name[GLOBAL_PROJECT_NAME]]]
if compare[name[get_alias_macro] is constant[None]] begin[:]
def function[get_alias, parameter[node, custom_alias_name]]:
if compare[name[custom_alias_name] is constant[None]] begin[:]
return[name[node].name]
name[self]._get_alias_func assign[=] name[get_alias]
return[name[self]._get_alias_func]
|
keyword[def] identifier[get_alias_func] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_get_alias_func] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_get_alias_func]
identifier[get_alias_macro] = identifier[self] . identifier[macro_manifest] . identifier[find_macro_by_name] (
literal[string] ,
identifier[self] . identifier[root_project_config] . identifier[project_name]
)
keyword[if] identifier[get_alias_macro] keyword[is] keyword[None] :
identifier[get_alias_macro] = identifier[self] . identifier[macro_manifest] . identifier[find_macro_by_name] (
literal[string] ,
identifier[GLOBAL_PROJECT_NAME]
)
keyword[if] identifier[get_alias_macro] keyword[is] keyword[None] :
keyword[def] identifier[get_alias] ( identifier[node] , identifier[custom_alias_name] = keyword[None] ):
keyword[if] identifier[custom_alias_name] keyword[is] keyword[None] :
keyword[return] identifier[node] . identifier[name]
keyword[else] :
keyword[return] identifier[custom_alias_name]
keyword[else] :
identifier[root_context] = identifier[dbt] . identifier[context] . identifier[parser] . identifier[generate_macro] (
identifier[get_alias_macro] , identifier[self] . identifier[root_project_config] ,
identifier[self] . identifier[macro_manifest]
)
identifier[get_alias] = identifier[get_alias_macro] . identifier[generator] ( identifier[root_context] )
identifier[self] . identifier[_get_alias_func] = identifier[get_alias]
keyword[return] identifier[self] . identifier[_get_alias_func]
|
def get_alias_func(self):
"""The get_alias function is set by a few different things:
- if there is a 'generate_alias_name' macro in the root project,
it will be used.
- if that does not exist but there is a 'generate_alias_name'
macro in the 'dbt' internal project, that will be used
- if neither of those exist (unit tests?), a function that returns
the 'default alias' as set in the model's filename or alias
configuration.
"""
if self._get_alias_func is not None:
return self._get_alias_func # depends on [control=['if'], data=[]]
get_alias_macro = self.macro_manifest.find_macro_by_name('generate_alias_name', self.root_project_config.project_name)
if get_alias_macro is None:
get_alias_macro = self.macro_manifest.find_macro_by_name('generate_alias_name', GLOBAL_PROJECT_NAME) # depends on [control=['if'], data=['get_alias_macro']]
if get_alias_macro is None:
def get_alias(node, custom_alias_name=None):
if custom_alias_name is None:
return node.name # depends on [control=['if'], data=[]]
else:
return custom_alias_name # depends on [control=['if'], data=[]]
else:
root_context = dbt.context.parser.generate_macro(get_alias_macro, self.root_project_config, self.macro_manifest)
get_alias = get_alias_macro.generator(root_context)
self._get_alias_func = get_alias
return self._get_alias_func
|
def set_cors_headers(resp, options):
"""
Performs the actual evaluation of Flas-CORS options and actually
modifies the response object.
This function is used both in the decorator and the after_request
callback
"""
# If CORS has already been evaluated via the decorator, skip
if hasattr(resp, FLASK_CORS_EVALUATED):
LOG.debug('CORS have been already evaluated, skipping')
return resp
# Some libraries, like OAuthlib, set resp.headers to non Multidict
# objects (Werkzeug Headers work as well). This is a problem because
# headers allow repeated values.
if (not isinstance(resp.headers, Headers)
and not isinstance(resp.headers, MultiDict)):
resp.headers = MultiDict(resp.headers)
headers_to_set = get_cors_headers(options, request.headers, request.method)
LOG.debug('Settings CORS headers: %s', str(headers_to_set))
for k, v in headers_to_set.items():
resp.headers.add(k, v)
return resp
|
def function[set_cors_headers, parameter[resp, options]]:
constant[
Performs the actual evaluation of Flas-CORS options and actually
modifies the response object.
This function is used both in the decorator and the after_request
callback
]
if call[name[hasattr], parameter[name[resp], name[FLASK_CORS_EVALUATED]]] begin[:]
call[name[LOG].debug, parameter[constant[CORS have been already evaluated, skipping]]]
return[name[resp]]
if <ast.BoolOp object at 0x7da1b0190310> begin[:]
name[resp].headers assign[=] call[name[MultiDict], parameter[name[resp].headers]]
variable[headers_to_set] assign[=] call[name[get_cors_headers], parameter[name[options], name[request].headers, name[request].method]]
call[name[LOG].debug, parameter[constant[Settings CORS headers: %s], call[name[str], parameter[name[headers_to_set]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b0190cd0>, <ast.Name object at 0x7da1b0191cf0>]]] in starred[call[name[headers_to_set].items, parameter[]]] begin[:]
call[name[resp].headers.add, parameter[name[k], name[v]]]
return[name[resp]]
|
keyword[def] identifier[set_cors_headers] ( identifier[resp] , identifier[options] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[resp] , identifier[FLASK_CORS_EVALUATED] ):
identifier[LOG] . identifier[debug] ( literal[string] )
keyword[return] identifier[resp]
keyword[if] ( keyword[not] identifier[isinstance] ( identifier[resp] . identifier[headers] , identifier[Headers] )
keyword[and] keyword[not] identifier[isinstance] ( identifier[resp] . identifier[headers] , identifier[MultiDict] )):
identifier[resp] . identifier[headers] = identifier[MultiDict] ( identifier[resp] . identifier[headers] )
identifier[headers_to_set] = identifier[get_cors_headers] ( identifier[options] , identifier[request] . identifier[headers] , identifier[request] . identifier[method] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[str] ( identifier[headers_to_set] ))
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[headers_to_set] . identifier[items] ():
identifier[resp] . identifier[headers] . identifier[add] ( identifier[k] , identifier[v] )
keyword[return] identifier[resp]
|
def set_cors_headers(resp, options):
"""
Performs the actual evaluation of Flas-CORS options and actually
modifies the response object.
This function is used both in the decorator and the after_request
callback
"""
# If CORS has already been evaluated via the decorator, skip
if hasattr(resp, FLASK_CORS_EVALUATED):
LOG.debug('CORS have been already evaluated, skipping')
return resp # depends on [control=['if'], data=[]]
# Some libraries, like OAuthlib, set resp.headers to non Multidict
# objects (Werkzeug Headers work as well). This is a problem because
# headers allow repeated values.
if not isinstance(resp.headers, Headers) and (not isinstance(resp.headers, MultiDict)):
resp.headers = MultiDict(resp.headers) # depends on [control=['if'], data=[]]
headers_to_set = get_cors_headers(options, request.headers, request.method)
LOG.debug('Settings CORS headers: %s', str(headers_to_set))
for (k, v) in headers_to_set.items():
resp.headers.add(k, v) # depends on [control=['for'], data=[]]
return resp
|
def backward_inference(self, variables, evidence=None):
"""
Backward inference method using belief propagation.
Parameters:
----------
variables: list
list of variables for which you want to compute the probability
evidence: dict
a dict key, value pair as {var: state_of_var_observed}
None if no evidence
Examples:
--------
>>> from pgmpy.factors.discrete import TabularCPD
>>> from pgmpy.models import DynamicBayesianNetwork as DBN
>>> from pgmpy.inference import DBNInference
>>> dbnet = DBN()
>>> dbnet.add_edges_from([(('Z', 0), ('X', 0)), (('X', 0), ('Y', 0)),
... (('Z', 0), ('Z', 1))])
>>> z_start_cpd = TabularCPD(('Z', 0), 2, [[0.5, 0.5]])
>>> x_i_cpd = TabularCPD(('X', 0), 2, [[0.6, 0.9],
... [0.4, 0.1]],
... evidence=[('Z', 0)],
... evidence_card=[2])
>>> y_i_cpd = TabularCPD(('Y', 0), 2, [[0.2, 0.3],
... [0.8, 0.7]],
... evidence=[('X', 0)],
... evidence_card=[2])
>>> z_trans_cpd = TabularCPD(('Z', 1), 2, [[0.4, 0.7],
... [0.6, 0.3]],
... evidence=[('Z', 0)],
... evidence_card=[2])
>>> dbnet.add_cpds(z_start_cpd, z_trans_cpd, x_i_cpd, y_i_cpd)
>>> dbnet.initialize_initial_state()
>>> dbn_inf = DBNInference(dbnet)
>>> dbn_inf.backward_inference([('X', 0)], {('Y', 0):0, ('Y', 1):1, ('Y', 2):1})[('X', 0)].values
array([ 0.66594382, 0.33405618])
"""
variable_dict = defaultdict(list)
for var in variables:
variable_dict[var[1]].append(var)
time_range = max(variable_dict)
interface_nodes_dict = {}
if evidence:
evid_time_range = max([time_slice for var, time_slice in evidence.keys()])
time_range = max(time_range, evid_time_range)
end_bp = BeliefPropagation(self.start_junction_tree)
potential_dict = self.forward_inference(variables, evidence, 'potential')
update_factor = self._shift_factor(potential_dict[time_range], 1)
factor_values = {}
for time_slice in range(time_range, 0, -1):
evidence_time = self._get_evidence(evidence, time_slice, 1)
evidence_prev_time = self._get_evidence(evidence, time_slice - 1, 0)
if evidence_prev_time:
interface_nodes_dict = {k: v for k, v in evidence_prev_time.items() if k in self.interface_nodes_0}
if evidence_time:
evidence_time.update(interface_nodes_dict)
mid_bp = BeliefPropagation(self.one_and_half_junction_tree)
self._update_belief(mid_bp, self.in_clique, potential_dict[time_slice - 1])
forward_factor = self._shift_factor(potential_dict[time_slice], 1)
self._update_belief(mid_bp, self.out_clique, forward_factor, update_factor)
if variable_dict[time_slice]:
variable_time = self._shift_nodes(variable_dict[time_slice], 1)
new_values = mid_bp.query(variable_time, evidence=evidence_time, joint=False)
changed_values = {}
for key in new_values.keys():
new_key = (key[0], time_slice)
new_factor = DiscreteFactor([new_key], new_values[key].cardinality, new_values[key].values)
changed_values[new_key] = new_factor
factor_values.update(changed_values)
clique_phi = self._get_factor(mid_bp, evidence_time)
in_clique_phi = self._marginalize_factor(self.interface_nodes_0, clique_phi)
update_factor = self._shift_factor(in_clique_phi, 1)
out_clique_phi = self._shift_factor(update_factor, 0)
self._update_belief(end_bp, self.start_interface_clique, potential_dict[0], out_clique_phi)
evidence_0 = self._get_evidence(evidence, 0, 0)
if variable_dict[0]:
factor_values.update(end_bp.query(variable_dict[0], evidence_0, joint=False))
return factor_values
|
def function[backward_inference, parameter[self, variables, evidence]]:
constant[
Backward inference method using belief propagation.
Parameters:
----------
variables: list
list of variables for which you want to compute the probability
evidence: dict
a dict key, value pair as {var: state_of_var_observed}
None if no evidence
Examples:
--------
>>> from pgmpy.factors.discrete import TabularCPD
>>> from pgmpy.models import DynamicBayesianNetwork as DBN
>>> from pgmpy.inference import DBNInference
>>> dbnet = DBN()
>>> dbnet.add_edges_from([(('Z', 0), ('X', 0)), (('X', 0), ('Y', 0)),
... (('Z', 0), ('Z', 1))])
>>> z_start_cpd = TabularCPD(('Z', 0), 2, [[0.5, 0.5]])
>>> x_i_cpd = TabularCPD(('X', 0), 2, [[0.6, 0.9],
... [0.4, 0.1]],
... evidence=[('Z', 0)],
... evidence_card=[2])
>>> y_i_cpd = TabularCPD(('Y', 0), 2, [[0.2, 0.3],
... [0.8, 0.7]],
... evidence=[('X', 0)],
... evidence_card=[2])
>>> z_trans_cpd = TabularCPD(('Z', 1), 2, [[0.4, 0.7],
... [0.6, 0.3]],
... evidence=[('Z', 0)],
... evidence_card=[2])
>>> dbnet.add_cpds(z_start_cpd, z_trans_cpd, x_i_cpd, y_i_cpd)
>>> dbnet.initialize_initial_state()
>>> dbn_inf = DBNInference(dbnet)
>>> dbn_inf.backward_inference([('X', 0)], {('Y', 0):0, ('Y', 1):1, ('Y', 2):1})[('X', 0)].values
array([ 0.66594382, 0.33405618])
]
variable[variable_dict] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[var]] in starred[name[variables]] begin[:]
call[call[name[variable_dict]][call[name[var]][constant[1]]].append, parameter[name[var]]]
variable[time_range] assign[=] call[name[max], parameter[name[variable_dict]]]
variable[interface_nodes_dict] assign[=] dictionary[[], []]
if name[evidence] begin[:]
variable[evid_time_range] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da20c6a9510>]]
variable[time_range] assign[=] call[name[max], parameter[name[time_range], name[evid_time_range]]]
variable[end_bp] assign[=] call[name[BeliefPropagation], parameter[name[self].start_junction_tree]]
variable[potential_dict] assign[=] call[name[self].forward_inference, parameter[name[variables], name[evidence], constant[potential]]]
variable[update_factor] assign[=] call[name[self]._shift_factor, parameter[call[name[potential_dict]][name[time_range]], constant[1]]]
variable[factor_values] assign[=] dictionary[[], []]
for taget[name[time_slice]] in starred[call[name[range], parameter[name[time_range], constant[0], <ast.UnaryOp object at 0x7da20c6aa7a0>]]] begin[:]
variable[evidence_time] assign[=] call[name[self]._get_evidence, parameter[name[evidence], name[time_slice], constant[1]]]
variable[evidence_prev_time] assign[=] call[name[self]._get_evidence, parameter[name[evidence], binary_operation[name[time_slice] - constant[1]], constant[0]]]
if name[evidence_prev_time] begin[:]
variable[interface_nodes_dict] assign[=] <ast.DictComp object at 0x7da20c6abe50>
if name[evidence_time] begin[:]
call[name[evidence_time].update, parameter[name[interface_nodes_dict]]]
variable[mid_bp] assign[=] call[name[BeliefPropagation], parameter[name[self].one_and_half_junction_tree]]
call[name[self]._update_belief, parameter[name[mid_bp], name[self].in_clique, call[name[potential_dict]][binary_operation[name[time_slice] - constant[1]]]]]
variable[forward_factor] assign[=] call[name[self]._shift_factor, parameter[call[name[potential_dict]][name[time_slice]], constant[1]]]
call[name[self]._update_belief, parameter[name[mid_bp], name[self].out_clique, name[forward_factor], name[update_factor]]]
if call[name[variable_dict]][name[time_slice]] begin[:]
variable[variable_time] assign[=] call[name[self]._shift_nodes, parameter[call[name[variable_dict]][name[time_slice]], constant[1]]]
variable[new_values] assign[=] call[name[mid_bp].query, parameter[name[variable_time]]]
variable[changed_values] assign[=] dictionary[[], []]
for taget[name[key]] in starred[call[name[new_values].keys, parameter[]]] begin[:]
variable[new_key] assign[=] tuple[[<ast.Subscript object at 0x7da20c6a8520>, <ast.Name object at 0x7da20c6a8ee0>]]
variable[new_factor] assign[=] call[name[DiscreteFactor], parameter[list[[<ast.Name object at 0x7da18f813c70>]], call[name[new_values]][name[key]].cardinality, call[name[new_values]][name[key]].values]]
call[name[changed_values]][name[new_key]] assign[=] name[new_factor]
call[name[factor_values].update, parameter[name[changed_values]]]
variable[clique_phi] assign[=] call[name[self]._get_factor, parameter[name[mid_bp], name[evidence_time]]]
variable[in_clique_phi] assign[=] call[name[self]._marginalize_factor, parameter[name[self].interface_nodes_0, name[clique_phi]]]
variable[update_factor] assign[=] call[name[self]._shift_factor, parameter[name[in_clique_phi], constant[1]]]
variable[out_clique_phi] assign[=] call[name[self]._shift_factor, parameter[name[update_factor], constant[0]]]
call[name[self]._update_belief, parameter[name[end_bp], name[self].start_interface_clique, call[name[potential_dict]][constant[0]], name[out_clique_phi]]]
variable[evidence_0] assign[=] call[name[self]._get_evidence, parameter[name[evidence], constant[0], constant[0]]]
if call[name[variable_dict]][constant[0]] begin[:]
call[name[factor_values].update, parameter[call[name[end_bp].query, parameter[call[name[variable_dict]][constant[0]], name[evidence_0]]]]]
return[name[factor_values]]
|
keyword[def] identifier[backward_inference] ( identifier[self] , identifier[variables] , identifier[evidence] = keyword[None] ):
literal[string]
identifier[variable_dict] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[var] keyword[in] identifier[variables] :
identifier[variable_dict] [ identifier[var] [ literal[int] ]]. identifier[append] ( identifier[var] )
identifier[time_range] = identifier[max] ( identifier[variable_dict] )
identifier[interface_nodes_dict] ={}
keyword[if] identifier[evidence] :
identifier[evid_time_range] = identifier[max] ([ identifier[time_slice] keyword[for] identifier[var] , identifier[time_slice] keyword[in] identifier[evidence] . identifier[keys] ()])
identifier[time_range] = identifier[max] ( identifier[time_range] , identifier[evid_time_range] )
identifier[end_bp] = identifier[BeliefPropagation] ( identifier[self] . identifier[start_junction_tree] )
identifier[potential_dict] = identifier[self] . identifier[forward_inference] ( identifier[variables] , identifier[evidence] , literal[string] )
identifier[update_factor] = identifier[self] . identifier[_shift_factor] ( identifier[potential_dict] [ identifier[time_range] ], literal[int] )
identifier[factor_values] ={}
keyword[for] identifier[time_slice] keyword[in] identifier[range] ( identifier[time_range] , literal[int] ,- literal[int] ):
identifier[evidence_time] = identifier[self] . identifier[_get_evidence] ( identifier[evidence] , identifier[time_slice] , literal[int] )
identifier[evidence_prev_time] = identifier[self] . identifier[_get_evidence] ( identifier[evidence] , identifier[time_slice] - literal[int] , literal[int] )
keyword[if] identifier[evidence_prev_time] :
identifier[interface_nodes_dict] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[evidence_prev_time] . identifier[items] () keyword[if] identifier[k] keyword[in] identifier[self] . identifier[interface_nodes_0] }
keyword[if] identifier[evidence_time] :
identifier[evidence_time] . identifier[update] ( identifier[interface_nodes_dict] )
identifier[mid_bp] = identifier[BeliefPropagation] ( identifier[self] . identifier[one_and_half_junction_tree] )
identifier[self] . identifier[_update_belief] ( identifier[mid_bp] , identifier[self] . identifier[in_clique] , identifier[potential_dict] [ identifier[time_slice] - literal[int] ])
identifier[forward_factor] = identifier[self] . identifier[_shift_factor] ( identifier[potential_dict] [ identifier[time_slice] ], literal[int] )
identifier[self] . identifier[_update_belief] ( identifier[mid_bp] , identifier[self] . identifier[out_clique] , identifier[forward_factor] , identifier[update_factor] )
keyword[if] identifier[variable_dict] [ identifier[time_slice] ]:
identifier[variable_time] = identifier[self] . identifier[_shift_nodes] ( identifier[variable_dict] [ identifier[time_slice] ], literal[int] )
identifier[new_values] = identifier[mid_bp] . identifier[query] ( identifier[variable_time] , identifier[evidence] = identifier[evidence_time] , identifier[joint] = keyword[False] )
identifier[changed_values] ={}
keyword[for] identifier[key] keyword[in] identifier[new_values] . identifier[keys] ():
identifier[new_key] =( identifier[key] [ literal[int] ], identifier[time_slice] )
identifier[new_factor] = identifier[DiscreteFactor] ([ identifier[new_key] ], identifier[new_values] [ identifier[key] ]. identifier[cardinality] , identifier[new_values] [ identifier[key] ]. identifier[values] )
identifier[changed_values] [ identifier[new_key] ]= identifier[new_factor]
identifier[factor_values] . identifier[update] ( identifier[changed_values] )
identifier[clique_phi] = identifier[self] . identifier[_get_factor] ( identifier[mid_bp] , identifier[evidence_time] )
identifier[in_clique_phi] = identifier[self] . identifier[_marginalize_factor] ( identifier[self] . identifier[interface_nodes_0] , identifier[clique_phi] )
identifier[update_factor] = identifier[self] . identifier[_shift_factor] ( identifier[in_clique_phi] , literal[int] )
identifier[out_clique_phi] = identifier[self] . identifier[_shift_factor] ( identifier[update_factor] , literal[int] )
identifier[self] . identifier[_update_belief] ( identifier[end_bp] , identifier[self] . identifier[start_interface_clique] , identifier[potential_dict] [ literal[int] ], identifier[out_clique_phi] )
identifier[evidence_0] = identifier[self] . identifier[_get_evidence] ( identifier[evidence] , literal[int] , literal[int] )
keyword[if] identifier[variable_dict] [ literal[int] ]:
identifier[factor_values] . identifier[update] ( identifier[end_bp] . identifier[query] ( identifier[variable_dict] [ literal[int] ], identifier[evidence_0] , identifier[joint] = keyword[False] ))
keyword[return] identifier[factor_values]
|
def backward_inference(self, variables, evidence=None):
"""
Backward inference method using belief propagation.
Parameters:
----------
variables: list
list of variables for which you want to compute the probability
evidence: dict
a dict key, value pair as {var: state_of_var_observed}
None if no evidence
Examples:
--------
>>> from pgmpy.factors.discrete import TabularCPD
>>> from pgmpy.models import DynamicBayesianNetwork as DBN
>>> from pgmpy.inference import DBNInference
>>> dbnet = DBN()
>>> dbnet.add_edges_from([(('Z', 0), ('X', 0)), (('X', 0), ('Y', 0)),
... (('Z', 0), ('Z', 1))])
>>> z_start_cpd = TabularCPD(('Z', 0), 2, [[0.5, 0.5]])
>>> x_i_cpd = TabularCPD(('X', 0), 2, [[0.6, 0.9],
... [0.4, 0.1]],
... evidence=[('Z', 0)],
... evidence_card=[2])
>>> y_i_cpd = TabularCPD(('Y', 0), 2, [[0.2, 0.3],
... [0.8, 0.7]],
... evidence=[('X', 0)],
... evidence_card=[2])
>>> z_trans_cpd = TabularCPD(('Z', 1), 2, [[0.4, 0.7],
... [0.6, 0.3]],
... evidence=[('Z', 0)],
... evidence_card=[2])
>>> dbnet.add_cpds(z_start_cpd, z_trans_cpd, x_i_cpd, y_i_cpd)
>>> dbnet.initialize_initial_state()
>>> dbn_inf = DBNInference(dbnet)
>>> dbn_inf.backward_inference([('X', 0)], {('Y', 0):0, ('Y', 1):1, ('Y', 2):1})[('X', 0)].values
array([ 0.66594382, 0.33405618])
"""
variable_dict = defaultdict(list)
for var in variables:
variable_dict[var[1]].append(var) # depends on [control=['for'], data=['var']]
time_range = max(variable_dict)
interface_nodes_dict = {}
if evidence:
evid_time_range = max([time_slice for (var, time_slice) in evidence.keys()])
time_range = max(time_range, evid_time_range) # depends on [control=['if'], data=[]]
end_bp = BeliefPropagation(self.start_junction_tree)
potential_dict = self.forward_inference(variables, evidence, 'potential')
update_factor = self._shift_factor(potential_dict[time_range], 1)
factor_values = {}
for time_slice in range(time_range, 0, -1):
evidence_time = self._get_evidence(evidence, time_slice, 1)
evidence_prev_time = self._get_evidence(evidence, time_slice - 1, 0)
if evidence_prev_time:
interface_nodes_dict = {k: v for (k, v) in evidence_prev_time.items() if k in self.interface_nodes_0} # depends on [control=['if'], data=[]]
if evidence_time:
evidence_time.update(interface_nodes_dict) # depends on [control=['if'], data=[]]
mid_bp = BeliefPropagation(self.one_and_half_junction_tree)
self._update_belief(mid_bp, self.in_clique, potential_dict[time_slice - 1])
forward_factor = self._shift_factor(potential_dict[time_slice], 1)
self._update_belief(mid_bp, self.out_clique, forward_factor, update_factor)
if variable_dict[time_slice]:
variable_time = self._shift_nodes(variable_dict[time_slice], 1)
new_values = mid_bp.query(variable_time, evidence=evidence_time, joint=False)
changed_values = {}
for key in new_values.keys():
new_key = (key[0], time_slice)
new_factor = DiscreteFactor([new_key], new_values[key].cardinality, new_values[key].values)
changed_values[new_key] = new_factor # depends on [control=['for'], data=['key']]
factor_values.update(changed_values) # depends on [control=['if'], data=[]]
clique_phi = self._get_factor(mid_bp, evidence_time)
in_clique_phi = self._marginalize_factor(self.interface_nodes_0, clique_phi)
update_factor = self._shift_factor(in_clique_phi, 1) # depends on [control=['for'], data=['time_slice']]
out_clique_phi = self._shift_factor(update_factor, 0)
self._update_belief(end_bp, self.start_interface_clique, potential_dict[0], out_clique_phi)
evidence_0 = self._get_evidence(evidence, 0, 0)
if variable_dict[0]:
factor_values.update(end_bp.query(variable_dict[0], evidence_0, joint=False)) # depends on [control=['if'], data=[]]
return factor_values
|
def main(command_line=True, **kwargs):
"""
NAME
jr6_txt_magic.py
DESCRIPTION
converts JR6 .txt format files to magic_measurements format files
SYNTAX
jr6_txt_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-f FILE: specify input file, or
-F FILE: specify output file, default is magic_measurements.txt
-Fsa: specify er_samples format file for appending, default is new er_samples.txt (Not working yet)
-spc NUM : specify number of characters to designate a specimen, default = 1
-loc LOCNAME : specify location/study name
-A: don't average replicate measurements
-ncn NCON: specify sample naming convention (6 and 7 not yet implemented)
-mcd [SO-MAG,SO-SUN,SO-SIGHT...] supply how these samples were oriented
-v NUM : specify the volume of the sample, default 2.5cm^3.
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
[2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitary length)
[3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name same as sample
[6] site is entered under a separate column NOT CURRENTLY SUPPORTED
[7-Z] [XXXX]YYY: XXXX is site designation with Z characters with sample name XXXXYYYY
NB: all others you will have to customize your self
or e-mail ltauxe@ucsd.edu for help.
INPUT
JR6 .txt format file
"""
# initialize some stuff
noave=0
volume = 2.5 * 1e-6 # default volume is 2.5 cm^3 (2.5 * 1e-6 meters^3)
inst=""
samp_con,Z='1',""
missing=1
demag="N"
er_location_name="unknown"
citation='This study'
args=sys.argv
meth_code="LP-NO"
specnum=-1
MagRecs=[]
version_num=pmag.get_version()
Samps=[] # keeps track of sample orientations
user=""
mag_file=""
dir_path='.'
ErSamps=[]
SampOuts=[]
samp_file = 'er_samples.txt'
meas_file = 'magic_measurements.txt'
#
# get command line arguments
#
if command_line:
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-ID' in sys.argv:
ind = sys.argv.index('-ID')
input_dir_path = sys.argv[ind+1]
else:
input_dir_path = dir_path
output_dir_path = dir_path
if "-h" in args:
print(main.__doc__)
return False
if '-F' in args:
ind=args.index("-F")
meas_file = args[ind+1]
if '-Fsa' in args:
ind = args.index("-Fsa")
samp_file = args[ind+1]
#try:
# open(samp_file,'r')
# ErSamps,file_type=pmag.magic_read(samp_file)
# print 'sample information will be appended to ', samp_file
#except:
# print samp_file,' not found: sample information will be stored in new er_samples.txt file'
# samp_file = output_dir_path+'/er_samples.txt'
if '-f' in args:
ind = args.index("-f")
mag_file= args[ind+1]
if "-spc" in args:
ind = args.index("-spc")
specnum = int(args[ind+1])
if "-ncn" in args:
ind=args.index("-ncn")
samp_con=sys.argv[ind+1]
if "-loc" in args:
ind=args.index("-loc")
er_location_name=args[ind+1]
if "-A" in args: noave=1
if "-mcd" in args:
ind=args.index("-mcd")
meth_code=args[ind+1]
if "-v" in args:
ind=args.index("-v")
volume=float(args[ind+1]) * 1e-6
if not command_line:
dir_path = kwargs.get('dir_path', '.')
input_dir_path = kwargs.get('input_dir_path', dir_path)
output_dir_path = dir_path
meas_file = kwargs.get('meas_file', 'magic_measurements.txt')
mag_file = kwargs.get('mag_file')
samp_file = kwargs.get('samp_file', 'er_samples.txt')
specnum = kwargs.get('specnum', 1)
samp_con = kwargs.get('samp_con', '1')
er_location_name = kwargs.get('er_location_name', '')
noave = kwargs.get('noave', 0) # default (0) means DO average
meth_code = kwargs.get('meth_code', "LP-NO")
volume = float(kwargs.get('volume', 0))
if not volume:
volume = 2.5 * 1e-6 #default volume is a 2.5 cm cube, translated to meters cubed
else:
#convert cm^3 to m^3
volume *= 1e-6
# format variables
mag_file = input_dir_path+"/" + mag_file
meas_file = output_dir_path+"/" + meas_file
samp_file = output_dir_path+"/" + samp_file
if specnum!=0:
specnum=-specnum
if "4" in samp_con:
if "-" not in samp_con:
print("option [4] must be in form 4-Z where Z is an integer")
return False, "option [4] must be in form 4-Z where Z is an integer"
else:
Z=samp_con.split("-")[1]
samp_con="4"
if "7" in samp_con:
if "-" not in samp_con:
print("option [7] must be in form 7-Z where Z is an integer")
return False, "option [7] must be in form 7-Z where Z is an integer"
else:
Z=samp_con.split("-")[1]
samp_con="7"
ErSampRec,ErSiteRec={},{}
# parse data
data=open(mag_file,'r')
line=data.readline()
line=data.readline()
line=data.readline()
while line !='':
parsedLine=line.split()
sampleName=parsedLine[0]
demagLevel=parsedLine[2]
date=parsedLine[3]
line=data.readline()
line=data.readline()
line=data.readline()
line=data.readline()
parsedLine=line.split()
specimenAngleDec=parsedLine[1]
specimenAngleInc=parsedLine[2]
while parsedLine[0] != 'MEAN' :
line=data.readline()
parsedLine=line.split()
if len(parsedLine) == 0:
parsedLine=["Hello"]
Mx=parsedLine[1]
My=parsedLine[2]
Mz=parsedLine[3]
line=data.readline()
line=data.readline()
parsedLine=line.split()
splitExp = parsedLine[2].split('A')
intensityVolStr=parsedLine[1] + splitExp[0]
intensityVol = float(intensityVolStr)
# check and see if Prec is too big and messes with the parcing.
precisionStr=''
if len(parsedLine) == 6: #normal line
precisionStr=parsedLine[5][0:-1]
else:
precisionStr=parsedLine[4][0:-1]
precisionPer = float(precisionStr)
precision=intensityVol*precisionPer/100
while parsedLine[0] != 'SPEC.' :
line=data.readline()
parsedLine=line.split()
if len(parsedLine) == 0:
parsedLine=["Hello"]
specimenDec=parsedLine[2]
specimenInc=parsedLine[3]
line=data.readline()
line=data.readline()
parsedLine=line.split()
geographicDec=parsedLine[1]
geographicInc=parsedLine[2]
# Add data to various MagIC data tables.
er_specimen_name = sampleName
if specnum!=0:
er_sample_name=er_specimen_name[:specnum]
else:
er_sample_name=er_specimen_name
if int(samp_con) in [1, 2, 3, 4, 5, 7]:
er_site_name=pmag.parse_site(er_sample_name,samp_con,Z)
else:
print("-W- Using unreognized sample convention option: ", samp_con)
# else:
# if 'er_site_name' in ErSampRec.keys():er_site_name=ErSampRec['er_site_name']
# if 'er_location_name' in ErSampRec.keys():er_location_name=ErSampRec['er_location_name']
# check sample list(SampOuts) to see if sample already exists in list before adding new sample info
sampleFlag=0
for sampRec in SampOuts:
if sampRec['er_sample_name'] == er_sample_name:
sampleFlag=1
break
if sampleFlag == 0:
ErSampRec['er_sample_name']=er_sample_name
ErSampRec['sample_azimuth']=specimenAngleDec
sample_dip=str(float(specimenAngleInc)-90.0) #convert to magic orientation
ErSampRec['sample_dip']=sample_dip
ErSampRec['magic_method_codes']=meth_code
ErSampRec['er_location_name']=er_location_name
ErSampRec['er_site_name']=er_site_name
ErSampRec['er_citation_names']='This study'
SampOuts.append(ErSampRec.copy())
MagRec={}
MagRec['measurement_description']='Date: '+date
MagRec["er_citation_names"]="This study"
MagRec['er_location_name']=er_location_name
MagRec['er_site_name']=er_site_name
MagRec['er_sample_name']=er_sample_name
MagRec['magic_software_packages']=version_num
MagRec["treatment_temp"]='%8.3e' % (273) # room temp in kelvin
MagRec["measurement_temp"]='%8.3e' % (273) # room temp in kelvin
MagRec["measurement_flag"]='g'
MagRec["measurement_standard"]='u'
MagRec["measurement_number"]='1'
MagRec["er_specimen_name"]=er_specimen_name
MagRec["treatment_ac_field"]='0'
if demagLevel == 'NRM':
meas_type="LT-NO"
elif demagLevel[0] == 'A':
meas_type="LT-AF-Z"
treat=float(demagLevel[1:])
MagRec["treatment_ac_field"]='%8.3e' %(treat*1e-3) # convert from mT to tesla
elif demagLevel[0] == 'T':
meas_type="LT-T-Z"
treat=float(demagLevel[1:])
MagRec["treatment_temp"]='%8.3e' % (treat+273.) # temp in kelvin
else:
print("measurement type unknown", demag_level)
return False, "measurement type unknown"
MagRec["measurement_magn_moment"]=str(intensityVol*volume) # Am^2
MagRec["measurement_magn_volume"]=intensityVolStr # A/m
MagRec["measurement_dec"]=specimenDec
MagRec["measurement_inc"]=specimenInc
MagRec['magic_method_codes']=meas_type
MagRecs.append(MagRec.copy())
#read lines till end of record
line=data.readline()
line=data.readline()
line=data.readline()
line=data.readline()
line=data.readline()
# read all the rest of the special characters. Some data files not consistantly formatted.
while (len(line) <=3 and line!=''):
line=data.readline()
#end of data while loop
MagOuts=pmag.measurements_methods(MagRecs,noave)
pmag.magic_write(samp_file,SampOuts,'er_samples')
print("sample orientations put in ",samp_file)
pmag.magic_write(meas_file,MagOuts,'magic_measurements')
print("results put in ",meas_file)
return True, meas_file
|
def function[main, parameter[command_line]]:
constant[
NAME
jr6_txt_magic.py
DESCRIPTION
converts JR6 .txt format files to magic_measurements format files
SYNTAX
jr6_txt_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-f FILE: specify input file, or
-F FILE: specify output file, default is magic_measurements.txt
-Fsa: specify er_samples format file for appending, default is new er_samples.txt (Not working yet)
-spc NUM : specify number of characters to designate a specimen, default = 1
-loc LOCNAME : specify location/study name
-A: don't average replicate measurements
-ncn NCON: specify sample naming convention (6 and 7 not yet implemented)
-mcd [SO-MAG,SO-SUN,SO-SIGHT...] supply how these samples were oriented
-v NUM : specify the volume of the sample, default 2.5cm^3.
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
[2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitary length)
[3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name same as sample
[6] site is entered under a separate column NOT CURRENTLY SUPPORTED
[7-Z] [XXXX]YYY: XXXX is site designation with Z characters with sample name XXXXYYYY
NB: all others you will have to customize your self
or e-mail ltauxe@ucsd.edu for help.
INPUT
JR6 .txt format file
]
variable[noave] assign[=] constant[0]
variable[volume] assign[=] binary_operation[constant[2.5] * constant[1e-06]]
variable[inst] assign[=] constant[]
<ast.Tuple object at 0x7da20c76fe20> assign[=] tuple[[<ast.Constant object at 0x7da20c76db40>, <ast.Constant object at 0x7da20c76f5b0>]]
variable[missing] assign[=] constant[1]
variable[demag] assign[=] constant[N]
variable[er_location_name] assign[=] constant[unknown]
variable[citation] assign[=] constant[This study]
variable[args] assign[=] name[sys].argv
variable[meth_code] assign[=] constant[LP-NO]
variable[specnum] assign[=] <ast.UnaryOp object at 0x7da20c76fe50>
variable[MagRecs] assign[=] list[[]]
variable[version_num] assign[=] call[name[pmag].get_version, parameter[]]
variable[Samps] assign[=] list[[]]
variable[user] assign[=] constant[]
variable[mag_file] assign[=] constant[]
variable[dir_path] assign[=] constant[.]
variable[ErSamps] assign[=] list[[]]
variable[SampOuts] assign[=] list[[]]
variable[samp_file] assign[=] constant[er_samples.txt]
variable[meas_file] assign[=] constant[magic_measurements.txt]
if name[command_line] begin[:]
if compare[constant[-WD] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-WD]]]
variable[dir_path] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-ID] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-ID]]]
variable[input_dir_path] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
variable[output_dir_path] assign[=] name[dir_path]
if compare[constant[-h] in name[args]] begin[:]
call[name[print], parameter[name[main].__doc__]]
return[constant[False]]
if compare[constant[-F] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-F]]]
variable[meas_file] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]]
if compare[constant[-Fsa] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-Fsa]]]
variable[samp_file] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]]
if compare[constant[-f] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-f]]]
variable[mag_file] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]]
if compare[constant[-spc] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-spc]]]
variable[specnum] assign[=] call[name[int], parameter[call[name[args]][binary_operation[name[ind] + constant[1]]]]]
if compare[constant[-ncn] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-ncn]]]
variable[samp_con] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-loc] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-loc]]]
variable[er_location_name] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]]
if compare[constant[-A] in name[args]] begin[:]
variable[noave] assign[=] constant[1]
if compare[constant[-mcd] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-mcd]]]
variable[meth_code] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]]
if compare[constant[-v] in name[args]] begin[:]
variable[ind] assign[=] call[name[args].index, parameter[constant[-v]]]
variable[volume] assign[=] binary_operation[call[name[float], parameter[call[name[args]][binary_operation[name[ind] + constant[1]]]]] * constant[1e-06]]
if <ast.UnaryOp object at 0x7da20c76efe0> begin[:]
variable[dir_path] assign[=] call[name[kwargs].get, parameter[constant[dir_path], constant[.]]]
variable[input_dir_path] assign[=] call[name[kwargs].get, parameter[constant[input_dir_path], name[dir_path]]]
variable[output_dir_path] assign[=] name[dir_path]
variable[meas_file] assign[=] call[name[kwargs].get, parameter[constant[meas_file], constant[magic_measurements.txt]]]
variable[mag_file] assign[=] call[name[kwargs].get, parameter[constant[mag_file]]]
variable[samp_file] assign[=] call[name[kwargs].get, parameter[constant[samp_file], constant[er_samples.txt]]]
variable[specnum] assign[=] call[name[kwargs].get, parameter[constant[specnum], constant[1]]]
variable[samp_con] assign[=] call[name[kwargs].get, parameter[constant[samp_con], constant[1]]]
variable[er_location_name] assign[=] call[name[kwargs].get, parameter[constant[er_location_name], constant[]]]
variable[noave] assign[=] call[name[kwargs].get, parameter[constant[noave], constant[0]]]
variable[meth_code] assign[=] call[name[kwargs].get, parameter[constant[meth_code], constant[LP-NO]]]
variable[volume] assign[=] call[name[float], parameter[call[name[kwargs].get, parameter[constant[volume], constant[0]]]]]
if <ast.UnaryOp object at 0x7da18eb57e80> begin[:]
variable[volume] assign[=] binary_operation[constant[2.5] * constant[1e-06]]
variable[mag_file] assign[=] binary_operation[binary_operation[name[input_dir_path] + constant[/]] + name[mag_file]]
variable[meas_file] assign[=] binary_operation[binary_operation[name[output_dir_path] + constant[/]] + name[meas_file]]
variable[samp_file] assign[=] binary_operation[binary_operation[name[output_dir_path] + constant[/]] + name[samp_file]]
if compare[name[specnum] not_equal[!=] constant[0]] begin[:]
variable[specnum] assign[=] <ast.UnaryOp object at 0x7da18eb55a80>
if compare[constant[4] in name[samp_con]] begin[:]
if compare[constant[-] <ast.NotIn object at 0x7da2590d7190> name[samp_con]] begin[:]
call[name[print], parameter[constant[option [4] must be in form 4-Z where Z is an integer]]]
return[tuple[[<ast.Constant object at 0x7da18eb55870>, <ast.Constant object at 0x7da18eb56380>]]]
if compare[constant[7] in name[samp_con]] begin[:]
if compare[constant[-] <ast.NotIn object at 0x7da2590d7190> name[samp_con]] begin[:]
call[name[print], parameter[constant[option [7] must be in form 7-Z where Z is an integer]]]
return[tuple[[<ast.Constant object at 0x7da18eb550f0>, <ast.Constant object at 0x7da18eb542e0>]]]
<ast.Tuple object at 0x7da18eb56da0> assign[=] tuple[[<ast.Dict object at 0x7da18eb55270>, <ast.Dict object at 0x7da18eb57c70>]]
variable[data] assign[=] call[name[open], parameter[name[mag_file], constant[r]]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
while compare[name[line] not_equal[!=] constant[]] begin[:]
variable[parsedLine] assign[=] call[name[line].split, parameter[]]
variable[sampleName] assign[=] call[name[parsedLine]][constant[0]]
variable[demagLevel] assign[=] call[name[parsedLine]][constant[2]]
variable[date] assign[=] call[name[parsedLine]][constant[3]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[parsedLine] assign[=] call[name[line].split, parameter[]]
variable[specimenAngleDec] assign[=] call[name[parsedLine]][constant[1]]
variable[specimenAngleInc] assign[=] call[name[parsedLine]][constant[2]]
while compare[call[name[parsedLine]][constant[0]] not_equal[!=] constant[MEAN]] begin[:]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[parsedLine] assign[=] call[name[line].split, parameter[]]
if compare[call[name[len], parameter[name[parsedLine]]] equal[==] constant[0]] begin[:]
variable[parsedLine] assign[=] list[[<ast.Constant object at 0x7da18eb54f70>]]
variable[Mx] assign[=] call[name[parsedLine]][constant[1]]
variable[My] assign[=] call[name[parsedLine]][constant[2]]
variable[Mz] assign[=] call[name[parsedLine]][constant[3]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[parsedLine] assign[=] call[name[line].split, parameter[]]
variable[splitExp] assign[=] call[call[name[parsedLine]][constant[2]].split, parameter[constant[A]]]
variable[intensityVolStr] assign[=] binary_operation[call[name[parsedLine]][constant[1]] + call[name[splitExp]][constant[0]]]
variable[intensityVol] assign[=] call[name[float], parameter[name[intensityVolStr]]]
variable[precisionStr] assign[=] constant[]
if compare[call[name[len], parameter[name[parsedLine]]] equal[==] constant[6]] begin[:]
variable[precisionStr] assign[=] call[call[name[parsedLine]][constant[5]]][<ast.Slice object at 0x7da18eb54ac0>]
variable[precisionPer] assign[=] call[name[float], parameter[name[precisionStr]]]
variable[precision] assign[=] binary_operation[binary_operation[name[intensityVol] * name[precisionPer]] / constant[100]]
while compare[call[name[parsedLine]][constant[0]] not_equal[!=] constant[SPEC.]] begin[:]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[parsedLine] assign[=] call[name[line].split, parameter[]]
if compare[call[name[len], parameter[name[parsedLine]]] equal[==] constant[0]] begin[:]
variable[parsedLine] assign[=] list[[<ast.Constant object at 0x7da1b05be200>]]
variable[specimenDec] assign[=] call[name[parsedLine]][constant[2]]
variable[specimenInc] assign[=] call[name[parsedLine]][constant[3]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[parsedLine] assign[=] call[name[line].split, parameter[]]
variable[geographicDec] assign[=] call[name[parsedLine]][constant[1]]
variable[geographicInc] assign[=] call[name[parsedLine]][constant[2]]
variable[er_specimen_name] assign[=] name[sampleName]
if compare[name[specnum] not_equal[!=] constant[0]] begin[:]
variable[er_sample_name] assign[=] call[name[er_specimen_name]][<ast.Slice object at 0x7da1b05bfdf0>]
if compare[call[name[int], parameter[name[samp_con]]] in list[[<ast.Constant object at 0x7da1b05bf4f0>, <ast.Constant object at 0x7da1b05bc3a0>, <ast.Constant object at 0x7da1b05bef50>, <ast.Constant object at 0x7da1b05be2c0>, <ast.Constant object at 0x7da1b05bece0>, <ast.Constant object at 0x7da1b05be770>]]] begin[:]
variable[er_site_name] assign[=] call[name[pmag].parse_site, parameter[name[er_sample_name], name[samp_con], name[Z]]]
variable[sampleFlag] assign[=] constant[0]
for taget[name[sampRec]] in starred[name[SampOuts]] begin[:]
if compare[call[name[sampRec]][constant[er_sample_name]] equal[==] name[er_sample_name]] begin[:]
variable[sampleFlag] assign[=] constant[1]
break
if compare[name[sampleFlag] equal[==] constant[0]] begin[:]
call[name[ErSampRec]][constant[er_sample_name]] assign[=] name[er_sample_name]
call[name[ErSampRec]][constant[sample_azimuth]] assign[=] name[specimenAngleDec]
variable[sample_dip] assign[=] call[name[str], parameter[binary_operation[call[name[float], parameter[name[specimenAngleInc]]] - constant[90.0]]]]
call[name[ErSampRec]][constant[sample_dip]] assign[=] name[sample_dip]
call[name[ErSampRec]][constant[magic_method_codes]] assign[=] name[meth_code]
call[name[ErSampRec]][constant[er_location_name]] assign[=] name[er_location_name]
call[name[ErSampRec]][constant[er_site_name]] assign[=] name[er_site_name]
call[name[ErSampRec]][constant[er_citation_names]] assign[=] constant[This study]
call[name[SampOuts].append, parameter[call[name[ErSampRec].copy, parameter[]]]]
variable[MagRec] assign[=] dictionary[[], []]
call[name[MagRec]][constant[measurement_description]] assign[=] binary_operation[constant[Date: ] + name[date]]
call[name[MagRec]][constant[er_citation_names]] assign[=] constant[This study]
call[name[MagRec]][constant[er_location_name]] assign[=] name[er_location_name]
call[name[MagRec]][constant[er_site_name]] assign[=] name[er_site_name]
call[name[MagRec]][constant[er_sample_name]] assign[=] name[er_sample_name]
call[name[MagRec]][constant[magic_software_packages]] assign[=] name[version_num]
call[name[MagRec]][constant[treatment_temp]] assign[=] binary_operation[constant[%8.3e] <ast.Mod object at 0x7da2590d6920> constant[273]]
call[name[MagRec]][constant[measurement_temp]] assign[=] binary_operation[constant[%8.3e] <ast.Mod object at 0x7da2590d6920> constant[273]]
call[name[MagRec]][constant[measurement_flag]] assign[=] constant[g]
call[name[MagRec]][constant[measurement_standard]] assign[=] constant[u]
call[name[MagRec]][constant[measurement_number]] assign[=] constant[1]
call[name[MagRec]][constant[er_specimen_name]] assign[=] name[er_specimen_name]
call[name[MagRec]][constant[treatment_ac_field]] assign[=] constant[0]
if compare[name[demagLevel] equal[==] constant[NRM]] begin[:]
variable[meas_type] assign[=] constant[LT-NO]
call[name[MagRec]][constant[measurement_magn_moment]] assign[=] call[name[str], parameter[binary_operation[name[intensityVol] * name[volume]]]]
call[name[MagRec]][constant[measurement_magn_volume]] assign[=] name[intensityVolStr]
call[name[MagRec]][constant[measurement_dec]] assign[=] name[specimenDec]
call[name[MagRec]][constant[measurement_inc]] assign[=] name[specimenInc]
call[name[MagRec]][constant[magic_method_codes]] assign[=] name[meas_type]
call[name[MagRecs].append, parameter[call[name[MagRec].copy, parameter[]]]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[line] assign[=] call[name[data].readline, parameter[]]
while <ast.BoolOp object at 0x7da18f09e260> begin[:]
variable[line] assign[=] call[name[data].readline, parameter[]]
variable[MagOuts] assign[=] call[name[pmag].measurements_methods, parameter[name[MagRecs], name[noave]]]
call[name[pmag].magic_write, parameter[name[samp_file], name[SampOuts], constant[er_samples]]]
call[name[print], parameter[constant[sample orientations put in ], name[samp_file]]]
call[name[pmag].magic_write, parameter[name[meas_file], name[MagOuts], constant[magic_measurements]]]
call[name[print], parameter[constant[results put in ], name[meas_file]]]
return[tuple[[<ast.Constant object at 0x7da18f09cfa0>, <ast.Name object at 0x7da18f09d240>]]]
|
keyword[def] identifier[main] ( identifier[command_line] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[noave] = literal[int]
identifier[volume] = literal[int] * literal[int]
identifier[inst] = literal[string]
identifier[samp_con] , identifier[Z] = literal[string] , literal[string]
identifier[missing] = literal[int]
identifier[demag] = literal[string]
identifier[er_location_name] = literal[string]
identifier[citation] = literal[string]
identifier[args] = identifier[sys] . identifier[argv]
identifier[meth_code] = literal[string]
identifier[specnum] =- literal[int]
identifier[MagRecs] =[]
identifier[version_num] = identifier[pmag] . identifier[get_version] ()
identifier[Samps] =[]
identifier[user] = literal[string]
identifier[mag_file] = literal[string]
identifier[dir_path] = literal[string]
identifier[ErSamps] =[]
identifier[SampOuts] =[]
identifier[samp_file] = literal[string]
identifier[meas_file] = literal[string]
keyword[if] identifier[command_line] :
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[dir_path] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[input_dir_path] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[else] :
identifier[input_dir_path] = identifier[dir_path]
identifier[output_dir_path] = identifier[dir_path]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[print] ( identifier[main] . identifier[__doc__] )
keyword[return] keyword[False]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[meas_file] = identifier[args] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[samp_file] = identifier[args] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[mag_file] = identifier[args] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[specnum] = identifier[int] ( identifier[args] [ identifier[ind] + literal[int] ])
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[samp_con] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[er_location_name] = identifier[args] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] : identifier[noave] = literal[int]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[meth_code] = identifier[args] [ identifier[ind] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[args] :
identifier[ind] = identifier[args] . identifier[index] ( literal[string] )
identifier[volume] = identifier[float] ( identifier[args] [ identifier[ind] + literal[int] ])* literal[int]
keyword[if] keyword[not] identifier[command_line] :
identifier[dir_path] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[input_dir_path] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[dir_path] )
identifier[output_dir_path] = identifier[dir_path]
identifier[meas_file] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[mag_file] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[samp_file] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[specnum] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[samp_con] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[er_location_name] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[noave] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[meth_code] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[volume] = identifier[float] ( identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ))
keyword[if] keyword[not] identifier[volume] :
identifier[volume] = literal[int] * literal[int]
keyword[else] :
identifier[volume] *= literal[int]
identifier[mag_file] = identifier[input_dir_path] + literal[string] + identifier[mag_file]
identifier[meas_file] = identifier[output_dir_path] + literal[string] + identifier[meas_file]
identifier[samp_file] = identifier[output_dir_path] + literal[string] + identifier[samp_file]
keyword[if] identifier[specnum] != literal[int] :
identifier[specnum] =- identifier[specnum]
keyword[if] literal[string] keyword[in] identifier[samp_con] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[samp_con] :
identifier[print] ( literal[string] )
keyword[return] keyword[False] , literal[string]
keyword[else] :
identifier[Z] = identifier[samp_con] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[samp_con] = literal[string]
keyword[if] literal[string] keyword[in] identifier[samp_con] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[samp_con] :
identifier[print] ( literal[string] )
keyword[return] keyword[False] , literal[string]
keyword[else] :
identifier[Z] = identifier[samp_con] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[samp_con] = literal[string]
identifier[ErSampRec] , identifier[ErSiteRec] ={},{}
identifier[data] = identifier[open] ( identifier[mag_file] , literal[string] )
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
keyword[while] identifier[line] != literal[string] :
identifier[parsedLine] = identifier[line] . identifier[split] ()
identifier[sampleName] = identifier[parsedLine] [ literal[int] ]
identifier[demagLevel] = identifier[parsedLine] [ literal[int] ]
identifier[date] = identifier[parsedLine] [ literal[int] ]
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
identifier[parsedLine] = identifier[line] . identifier[split] ()
identifier[specimenAngleDec] = identifier[parsedLine] [ literal[int] ]
identifier[specimenAngleInc] = identifier[parsedLine] [ literal[int] ]
keyword[while] identifier[parsedLine] [ literal[int] ]!= literal[string] :
identifier[line] = identifier[data] . identifier[readline] ()
identifier[parsedLine] = identifier[line] . identifier[split] ()
keyword[if] identifier[len] ( identifier[parsedLine] )== literal[int] :
identifier[parsedLine] =[ literal[string] ]
identifier[Mx] = identifier[parsedLine] [ literal[int] ]
identifier[My] = identifier[parsedLine] [ literal[int] ]
identifier[Mz] = identifier[parsedLine] [ literal[int] ]
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
identifier[parsedLine] = identifier[line] . identifier[split] ()
identifier[splitExp] = identifier[parsedLine] [ literal[int] ]. identifier[split] ( literal[string] )
identifier[intensityVolStr] = identifier[parsedLine] [ literal[int] ]+ identifier[splitExp] [ literal[int] ]
identifier[intensityVol] = identifier[float] ( identifier[intensityVolStr] )
identifier[precisionStr] = literal[string]
keyword[if] identifier[len] ( identifier[parsedLine] )== literal[int] :
identifier[precisionStr] = identifier[parsedLine] [ literal[int] ][ literal[int] :- literal[int] ]
keyword[else] :
identifier[precisionStr] = identifier[parsedLine] [ literal[int] ][ literal[int] :- literal[int] ]
identifier[precisionPer] = identifier[float] ( identifier[precisionStr] )
identifier[precision] = identifier[intensityVol] * identifier[precisionPer] / literal[int]
keyword[while] identifier[parsedLine] [ literal[int] ]!= literal[string] :
identifier[line] = identifier[data] . identifier[readline] ()
identifier[parsedLine] = identifier[line] . identifier[split] ()
keyword[if] identifier[len] ( identifier[parsedLine] )== literal[int] :
identifier[parsedLine] =[ literal[string] ]
identifier[specimenDec] = identifier[parsedLine] [ literal[int] ]
identifier[specimenInc] = identifier[parsedLine] [ literal[int] ]
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
identifier[parsedLine] = identifier[line] . identifier[split] ()
identifier[geographicDec] = identifier[parsedLine] [ literal[int] ]
identifier[geographicInc] = identifier[parsedLine] [ literal[int] ]
identifier[er_specimen_name] = identifier[sampleName]
keyword[if] identifier[specnum] != literal[int] :
identifier[er_sample_name] = identifier[er_specimen_name] [: identifier[specnum] ]
keyword[else] :
identifier[er_sample_name] = identifier[er_specimen_name]
keyword[if] identifier[int] ( identifier[samp_con] ) keyword[in] [ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]:
identifier[er_site_name] = identifier[pmag] . identifier[parse_site] ( identifier[er_sample_name] , identifier[samp_con] , identifier[Z] )
keyword[else] :
identifier[print] ( literal[string] , identifier[samp_con] )
identifier[sampleFlag] = literal[int]
keyword[for] identifier[sampRec] keyword[in] identifier[SampOuts] :
keyword[if] identifier[sampRec] [ literal[string] ]== identifier[er_sample_name] :
identifier[sampleFlag] = literal[int]
keyword[break]
keyword[if] identifier[sampleFlag] == literal[int] :
identifier[ErSampRec] [ literal[string] ]= identifier[er_sample_name]
identifier[ErSampRec] [ literal[string] ]= identifier[specimenAngleDec]
identifier[sample_dip] = identifier[str] ( identifier[float] ( identifier[specimenAngleInc] )- literal[int] )
identifier[ErSampRec] [ literal[string] ]= identifier[sample_dip]
identifier[ErSampRec] [ literal[string] ]= identifier[meth_code]
identifier[ErSampRec] [ literal[string] ]= identifier[er_location_name]
identifier[ErSampRec] [ literal[string] ]= identifier[er_site_name]
identifier[ErSampRec] [ literal[string] ]= literal[string]
identifier[SampOuts] . identifier[append] ( identifier[ErSampRec] . identifier[copy] ())
identifier[MagRec] ={}
identifier[MagRec] [ literal[string] ]= literal[string] + identifier[date]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= identifier[er_location_name]
identifier[MagRec] [ literal[string] ]= identifier[er_site_name]
identifier[MagRec] [ literal[string] ]= identifier[er_sample_name]
identifier[MagRec] [ literal[string] ]= identifier[version_num]
identifier[MagRec] [ literal[string] ]= literal[string] %( literal[int] )
identifier[MagRec] [ literal[string] ]= literal[string] %( literal[int] )
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= literal[string]
identifier[MagRec] [ literal[string] ]= identifier[er_specimen_name]
identifier[MagRec] [ literal[string] ]= literal[string]
keyword[if] identifier[demagLevel] == literal[string] :
identifier[meas_type] = literal[string]
keyword[elif] identifier[demagLevel] [ literal[int] ]== literal[string] :
identifier[meas_type] = literal[string]
identifier[treat] = identifier[float] ( identifier[demagLevel] [ literal[int] :])
identifier[MagRec] [ literal[string] ]= literal[string] %( identifier[treat] * literal[int] )
keyword[elif] identifier[demagLevel] [ literal[int] ]== literal[string] :
identifier[meas_type] = literal[string]
identifier[treat] = identifier[float] ( identifier[demagLevel] [ literal[int] :])
identifier[MagRec] [ literal[string] ]= literal[string] %( identifier[treat] + literal[int] )
keyword[else] :
identifier[print] ( literal[string] , identifier[demag_level] )
keyword[return] keyword[False] , literal[string]
identifier[MagRec] [ literal[string] ]= identifier[str] ( identifier[intensityVol] * identifier[volume] )
identifier[MagRec] [ literal[string] ]= identifier[intensityVolStr]
identifier[MagRec] [ literal[string] ]= identifier[specimenDec]
identifier[MagRec] [ literal[string] ]= identifier[specimenInc]
identifier[MagRec] [ literal[string] ]= identifier[meas_type]
identifier[MagRecs] . identifier[append] ( identifier[MagRec] . identifier[copy] ())
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
identifier[line] = identifier[data] . identifier[readline] ()
keyword[while] ( identifier[len] ( identifier[line] )<= literal[int] keyword[and] identifier[line] != literal[string] ):
identifier[line] = identifier[data] . identifier[readline] ()
identifier[MagOuts] = identifier[pmag] . identifier[measurements_methods] ( identifier[MagRecs] , identifier[noave] )
identifier[pmag] . identifier[magic_write] ( identifier[samp_file] , identifier[SampOuts] , literal[string] )
identifier[print] ( literal[string] , identifier[samp_file] )
identifier[pmag] . identifier[magic_write] ( identifier[meas_file] , identifier[MagOuts] , literal[string] )
identifier[print] ( literal[string] , identifier[meas_file] )
keyword[return] keyword[True] , identifier[meas_file]
|
def main(command_line=True, **kwargs):
"""
NAME
jr6_txt_magic.py
DESCRIPTION
converts JR6 .txt format files to magic_measurements format files
SYNTAX
jr6_txt_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-f FILE: specify input file, or
-F FILE: specify output file, default is magic_measurements.txt
-Fsa: specify er_samples format file for appending, default is new er_samples.txt (Not working yet)
-spc NUM : specify number of characters to designate a specimen, default = 1
-loc LOCNAME : specify location/study name
-A: don't average replicate measurements
-ncn NCON: specify sample naming convention (6 and 7 not yet implemented)
-mcd [SO-MAG,SO-SUN,SO-SIGHT...] supply how these samples were oriented
-v NUM : specify the volume of the sample, default 2.5cm^3.
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
[2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitary length)
[3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name same as sample
[6] site is entered under a separate column NOT CURRENTLY SUPPORTED
[7-Z] [XXXX]YYY: XXXX is site designation with Z characters with sample name XXXXYYYY
NB: all others you will have to customize your self
or e-mail ltauxe@ucsd.edu for help.
INPUT
JR6 .txt format file
"""
# initialize some stuff
noave = 0
volume = 2.5 * 1e-06 # default volume is 2.5 cm^3 (2.5 * 1e-6 meters^3)
inst = ''
(samp_con, Z) = ('1', '')
missing = 1
demag = 'N'
er_location_name = 'unknown'
citation = 'This study'
args = sys.argv
meth_code = 'LP-NO'
specnum = -1
MagRecs = []
version_num = pmag.get_version()
Samps = [] # keeps track of sample orientations
user = ''
mag_file = ''
dir_path = '.'
ErSamps = []
SampOuts = []
samp_file = 'er_samples.txt'
meas_file = 'magic_measurements.txt'
#
# get command line arguments
#
if command_line:
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path = sys.argv[ind + 1] # depends on [control=['if'], data=[]]
if '-ID' in sys.argv:
ind = sys.argv.index('-ID')
input_dir_path = sys.argv[ind + 1] # depends on [control=['if'], data=[]]
else:
input_dir_path = dir_path
output_dir_path = dir_path
if '-h' in args:
print(main.__doc__)
return False # depends on [control=['if'], data=[]]
if '-F' in args:
ind = args.index('-F')
meas_file = args[ind + 1] # depends on [control=['if'], data=['args']]
if '-Fsa' in args:
ind = args.index('-Fsa')
samp_file = args[ind + 1] # depends on [control=['if'], data=['args']]
#try:
# open(samp_file,'r')
# ErSamps,file_type=pmag.magic_read(samp_file)
# print 'sample information will be appended to ', samp_file
#except:
# print samp_file,' not found: sample information will be stored in new er_samples.txt file'
# samp_file = output_dir_path+'/er_samples.txt'
if '-f' in args:
ind = args.index('-f')
mag_file = args[ind + 1] # depends on [control=['if'], data=['args']]
if '-spc' in args:
ind = args.index('-spc')
specnum = int(args[ind + 1]) # depends on [control=['if'], data=['args']]
if '-ncn' in args:
ind = args.index('-ncn')
samp_con = sys.argv[ind + 1] # depends on [control=['if'], data=['args']]
if '-loc' in args:
ind = args.index('-loc')
er_location_name = args[ind + 1] # depends on [control=['if'], data=['args']]
if '-A' in args:
noave = 1 # depends on [control=['if'], data=[]]
if '-mcd' in args:
ind = args.index('-mcd')
meth_code = args[ind + 1] # depends on [control=['if'], data=['args']]
if '-v' in args:
ind = args.index('-v')
volume = float(args[ind + 1]) * 1e-06 # depends on [control=['if'], data=['args']] # depends on [control=['if'], data=[]]
if not command_line:
dir_path = kwargs.get('dir_path', '.')
input_dir_path = kwargs.get('input_dir_path', dir_path)
output_dir_path = dir_path
meas_file = kwargs.get('meas_file', 'magic_measurements.txt')
mag_file = kwargs.get('mag_file')
samp_file = kwargs.get('samp_file', 'er_samples.txt')
specnum = kwargs.get('specnum', 1)
samp_con = kwargs.get('samp_con', '1')
er_location_name = kwargs.get('er_location_name', '')
noave = kwargs.get('noave', 0) # default (0) means DO average
meth_code = kwargs.get('meth_code', 'LP-NO')
volume = float(kwargs.get('volume', 0))
if not volume:
volume = 2.5 * 1e-06 #default volume is a 2.5 cm cube, translated to meters cubed # depends on [control=['if'], data=[]]
else:
#convert cm^3 to m^3
volume *= 1e-06 # depends on [control=['if'], data=[]]
# format variables
mag_file = input_dir_path + '/' + mag_file
meas_file = output_dir_path + '/' + meas_file
samp_file = output_dir_path + '/' + samp_file
if specnum != 0:
specnum = -specnum # depends on [control=['if'], data=['specnum']]
if '4' in samp_con:
if '-' not in samp_con:
print('option [4] must be in form 4-Z where Z is an integer')
return (False, 'option [4] must be in form 4-Z where Z is an integer') # depends on [control=['if'], data=[]]
else:
Z = samp_con.split('-')[1]
samp_con = '4' # depends on [control=['if'], data=['samp_con']]
if '7' in samp_con:
if '-' not in samp_con:
print('option [7] must be in form 7-Z where Z is an integer')
return (False, 'option [7] must be in form 7-Z where Z is an integer') # depends on [control=['if'], data=[]]
else:
Z = samp_con.split('-')[1]
samp_con = '7' # depends on [control=['if'], data=['samp_con']]
(ErSampRec, ErSiteRec) = ({}, {})
# parse data
data = open(mag_file, 'r')
line = data.readline()
line = data.readline()
line = data.readline()
while line != '':
parsedLine = line.split()
sampleName = parsedLine[0]
demagLevel = parsedLine[2]
date = parsedLine[3]
line = data.readline()
line = data.readline()
line = data.readline()
line = data.readline()
parsedLine = line.split()
specimenAngleDec = parsedLine[1]
specimenAngleInc = parsedLine[2]
while parsedLine[0] != 'MEAN':
line = data.readline()
parsedLine = line.split()
if len(parsedLine) == 0:
parsedLine = ['Hello'] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
Mx = parsedLine[1]
My = parsedLine[2]
Mz = parsedLine[3]
line = data.readline()
line = data.readline()
parsedLine = line.split()
splitExp = parsedLine[2].split('A')
intensityVolStr = parsedLine[1] + splitExp[0]
intensityVol = float(intensityVolStr)
# check and see if Prec is too big and messes with the parcing.
precisionStr = ''
if len(parsedLine) == 6: #normal line
precisionStr = parsedLine[5][0:-1] # depends on [control=['if'], data=[]]
else:
precisionStr = parsedLine[4][0:-1]
precisionPer = float(precisionStr)
precision = intensityVol * precisionPer / 100
while parsedLine[0] != 'SPEC.':
line = data.readline()
parsedLine = line.split()
if len(parsedLine) == 0:
parsedLine = ['Hello'] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
specimenDec = parsedLine[2]
specimenInc = parsedLine[3]
line = data.readline()
line = data.readline()
parsedLine = line.split()
geographicDec = parsedLine[1]
geographicInc = parsedLine[2]
# Add data to various MagIC data tables.
er_specimen_name = sampleName
if specnum != 0:
er_sample_name = er_specimen_name[:specnum] # depends on [control=['if'], data=['specnum']]
else:
er_sample_name = er_specimen_name
if int(samp_con) in [1, 2, 3, 4, 5, 7]:
er_site_name = pmag.parse_site(er_sample_name, samp_con, Z) # depends on [control=['if'], data=[]]
else:
print('-W- Using unreognized sample convention option: ', samp_con)
# else:
# if 'er_site_name' in ErSampRec.keys():er_site_name=ErSampRec['er_site_name']
# if 'er_location_name' in ErSampRec.keys():er_location_name=ErSampRec['er_location_name']
# check sample list(SampOuts) to see if sample already exists in list before adding new sample info
sampleFlag = 0
for sampRec in SampOuts:
if sampRec['er_sample_name'] == er_sample_name:
sampleFlag = 1
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sampRec']]
if sampleFlag == 0:
ErSampRec['er_sample_name'] = er_sample_name
ErSampRec['sample_azimuth'] = specimenAngleDec
sample_dip = str(float(specimenAngleInc) - 90.0) #convert to magic orientation
ErSampRec['sample_dip'] = sample_dip
ErSampRec['magic_method_codes'] = meth_code
ErSampRec['er_location_name'] = er_location_name
ErSampRec['er_site_name'] = er_site_name
ErSampRec['er_citation_names'] = 'This study'
SampOuts.append(ErSampRec.copy()) # depends on [control=['if'], data=[]]
MagRec = {}
MagRec['measurement_description'] = 'Date: ' + date
MagRec['er_citation_names'] = 'This study'
MagRec['er_location_name'] = er_location_name
MagRec['er_site_name'] = er_site_name
MagRec['er_sample_name'] = er_sample_name
MagRec['magic_software_packages'] = version_num
MagRec['treatment_temp'] = '%8.3e' % 273 # room temp in kelvin
MagRec['measurement_temp'] = '%8.3e' % 273 # room temp in kelvin
MagRec['measurement_flag'] = 'g'
MagRec['measurement_standard'] = 'u'
MagRec['measurement_number'] = '1'
MagRec['er_specimen_name'] = er_specimen_name
MagRec['treatment_ac_field'] = '0'
if demagLevel == 'NRM':
meas_type = 'LT-NO' # depends on [control=['if'], data=[]]
elif demagLevel[0] == 'A':
meas_type = 'LT-AF-Z'
treat = float(demagLevel[1:])
MagRec['treatment_ac_field'] = '%8.3e' % (treat * 0.001) # convert from mT to tesla # depends on [control=['if'], data=[]]
elif demagLevel[0] == 'T':
meas_type = 'LT-T-Z'
treat = float(demagLevel[1:])
MagRec['treatment_temp'] = '%8.3e' % (treat + 273.0) # temp in kelvin # depends on [control=['if'], data=[]]
else:
print('measurement type unknown', demag_level)
return (False, 'measurement type unknown')
MagRec['measurement_magn_moment'] = str(intensityVol * volume) # Am^2
MagRec['measurement_magn_volume'] = intensityVolStr # A/m
MagRec['measurement_dec'] = specimenDec
MagRec['measurement_inc'] = specimenInc
MagRec['magic_method_codes'] = meas_type
MagRecs.append(MagRec.copy())
#read lines till end of record
line = data.readline()
line = data.readline()
line = data.readline()
line = data.readline()
line = data.readline()
# read all the rest of the special characters. Some data files not consistantly formatted.
while len(line) <= 3 and line != '':
line = data.readline() # depends on [control=['while'], data=[]] # depends on [control=['while'], data=['line']]
#end of data while loop
MagOuts = pmag.measurements_methods(MagRecs, noave)
pmag.magic_write(samp_file, SampOuts, 'er_samples')
print('sample orientations put in ', samp_file)
pmag.magic_write(meas_file, MagOuts, 'magic_measurements')
print('results put in ', meas_file)
return (True, meas_file)
|
def parse_responsive_length(responsive_length):
"""
Takes a string containing a length definition in pixels or percent and parses it to obtain
a computational length. It returns a tuple where the first element is the length in pixels and
the second element is its length in percent divided by 100.
Note that one of both returned elements is None.
"""
responsive_length = responsive_length.strip()
if responsive_length.endswith('px'):
return (int(responsive_length.rstrip('px')), None)
elif responsive_length.endswith('%'):
return (None, float(responsive_length.rstrip('%')) / 100)
return (None, None)
|
def function[parse_responsive_length, parameter[responsive_length]]:
constant[
Takes a string containing a length definition in pixels or percent and parses it to obtain
a computational length. It returns a tuple where the first element is the length in pixels and
the second element is its length in percent divided by 100.
Note that one of both returned elements is None.
]
variable[responsive_length] assign[=] call[name[responsive_length].strip, parameter[]]
if call[name[responsive_length].endswith, parameter[constant[px]]] begin[:]
return[tuple[[<ast.Call object at 0x7da1b13a9180>, <ast.Constant object at 0x7da20c6e7b20>]]]
return[tuple[[<ast.Constant object at 0x7da20c6e72b0>, <ast.Constant object at 0x7da20c6e64a0>]]]
|
keyword[def] identifier[parse_responsive_length] ( identifier[responsive_length] ):
literal[string]
identifier[responsive_length] = identifier[responsive_length] . identifier[strip] ()
keyword[if] identifier[responsive_length] . identifier[endswith] ( literal[string] ):
keyword[return] ( identifier[int] ( identifier[responsive_length] . identifier[rstrip] ( literal[string] )), keyword[None] )
keyword[elif] identifier[responsive_length] . identifier[endswith] ( literal[string] ):
keyword[return] ( keyword[None] , identifier[float] ( identifier[responsive_length] . identifier[rstrip] ( literal[string] ))/ literal[int] )
keyword[return] ( keyword[None] , keyword[None] )
|
def parse_responsive_length(responsive_length):
"""
Takes a string containing a length definition in pixels or percent and parses it to obtain
a computational length. It returns a tuple where the first element is the length in pixels and
the second element is its length in percent divided by 100.
Note that one of both returned elements is None.
"""
responsive_length = responsive_length.strip()
if responsive_length.endswith('px'):
return (int(responsive_length.rstrip('px')), None) # depends on [control=['if'], data=[]]
elif responsive_length.endswith('%'):
return (None, float(responsive_length.rstrip('%')) / 100) # depends on [control=['if'], data=[]]
return (None, None)
|
async def result_continuation(task):
"""A preliminary result processor we'll chain on to the original task
This will get executed wherever the source task was executed, in this
case one of the threads in the ThreadPoolExecutor"""
await asyncio.sleep(0.1)
num, res = task.result()
return num, res * 2
|
<ast.AsyncFunctionDef object at 0x7da18c4ccb80>
|
keyword[async] keyword[def] identifier[result_continuation] ( identifier[task] ):
literal[string]
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] )
identifier[num] , identifier[res] = identifier[task] . identifier[result] ()
keyword[return] identifier[num] , identifier[res] * literal[int]
|
async def result_continuation(task):
"""A preliminary result processor we'll chain on to the original task
This will get executed wherever the source task was executed, in this
case one of the threads in the ThreadPoolExecutor"""
await asyncio.sleep(0.1)
(num, res) = task.result()
return (num, res * 2)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.