code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def main(demo=False, aschild=False, targets=[]):
"""Start the Qt-runtime and show the window
Arguments:
aschild (bool, optional): Run as child of parent process
"""
if aschild:
print("Starting pyblish-qml")
compat.main()
app = Application(APP_PATH, targets)
app.listen()
print("Done, don't forget to call `show()`")
return app.exec_()
else:
print("Starting pyblish-qml server..")
service = ipc.service.MockService() if demo else ipc.service.Service()
server = ipc.server.Server(service, targets=targets)
proxy = ipc.server.Proxy(server)
proxy.show(settings.to_dict())
server.listen()
server.wait()
|
def function[main, parameter[demo, aschild, targets]]:
constant[Start the Qt-runtime and show the window
Arguments:
aschild (bool, optional): Run as child of parent process
]
if name[aschild] begin[:]
call[name[print], parameter[constant[Starting pyblish-qml]]]
call[name[compat].main, parameter[]]
variable[app] assign[=] call[name[Application], parameter[name[APP_PATH], name[targets]]]
call[name[app].listen, parameter[]]
call[name[print], parameter[constant[Done, don't forget to call `show()`]]]
return[call[name[app].exec_, parameter[]]]
|
keyword[def] identifier[main] ( identifier[demo] = keyword[False] , identifier[aschild] = keyword[False] , identifier[targets] =[]):
literal[string]
keyword[if] identifier[aschild] :
identifier[print] ( literal[string] )
identifier[compat] . identifier[main] ()
identifier[app] = identifier[Application] ( identifier[APP_PATH] , identifier[targets] )
identifier[app] . identifier[listen] ()
identifier[print] ( literal[string] )
keyword[return] identifier[app] . identifier[exec_] ()
keyword[else] :
identifier[print] ( literal[string] )
identifier[service] = identifier[ipc] . identifier[service] . identifier[MockService] () keyword[if] identifier[demo] keyword[else] identifier[ipc] . identifier[service] . identifier[Service] ()
identifier[server] = identifier[ipc] . identifier[server] . identifier[Server] ( identifier[service] , identifier[targets] = identifier[targets] )
identifier[proxy] = identifier[ipc] . identifier[server] . identifier[Proxy] ( identifier[server] )
identifier[proxy] . identifier[show] ( identifier[settings] . identifier[to_dict] ())
identifier[server] . identifier[listen] ()
identifier[server] . identifier[wait] ()
|
def main(demo=False, aschild=False, targets=[]):
"""Start the Qt-runtime and show the window
Arguments:
aschild (bool, optional): Run as child of parent process
"""
if aschild:
print('Starting pyblish-qml')
compat.main()
app = Application(APP_PATH, targets)
app.listen()
print("Done, don't forget to call `show()`")
return app.exec_() # depends on [control=['if'], data=[]]
else:
print('Starting pyblish-qml server..')
service = ipc.service.MockService() if demo else ipc.service.Service()
server = ipc.server.Server(service, targets=targets)
proxy = ipc.server.Proxy(server)
proxy.show(settings.to_dict())
server.listen()
server.wait()
|
def register_on_show_window(self, callback):
"""Set the callback function to consume on show window events.
This occurs when the console window is to be activated and brought to
the foreground of the desktop of the host PC.
Callback receives a IShowWindowEvent object.
Returns the callback_id
"""
event_type = library.VBoxEventType.on_show_window
return self.event_source.register_callback(callback, event_type)
|
def function[register_on_show_window, parameter[self, callback]]:
constant[Set the callback function to consume on show window events.
This occurs when the console window is to be activated and brought to
the foreground of the desktop of the host PC.
Callback receives a IShowWindowEvent object.
Returns the callback_id
]
variable[event_type] assign[=] name[library].VBoxEventType.on_show_window
return[call[name[self].event_source.register_callback, parameter[name[callback], name[event_type]]]]
|
keyword[def] identifier[register_on_show_window] ( identifier[self] , identifier[callback] ):
literal[string]
identifier[event_type] = identifier[library] . identifier[VBoxEventType] . identifier[on_show_window]
keyword[return] identifier[self] . identifier[event_source] . identifier[register_callback] ( identifier[callback] , identifier[event_type] )
|
def register_on_show_window(self, callback):
"""Set the callback function to consume on show window events.
This occurs when the console window is to be activated and brought to
the foreground of the desktop of the host PC.
Callback receives a IShowWindowEvent object.
Returns the callback_id
"""
event_type = library.VBoxEventType.on_show_window
return self.event_source.register_callback(callback, event_type)
|
def open_dut(self, port=None):
"""
Open connection to dut.
:param port: com port to use.
:return:
"""
if port is not None:
self.comport = port
try:
self.open_connection()
except (DutConnectionError, ValueError) as err:
self.close_dut(use_prepare=False)
raise DutConnectionError(str(err))
except KeyboardInterrupt:
self.close_dut(use_prepare=False)
self.close_connection()
raise
|
def function[open_dut, parameter[self, port]]:
constant[
Open connection to dut.
:param port: com port to use.
:return:
]
if compare[name[port] is_not constant[None]] begin[:]
name[self].comport assign[=] name[port]
<ast.Try object at 0x7da20e956710>
|
keyword[def] identifier[open_dut] ( identifier[self] , identifier[port] = keyword[None] ):
literal[string]
keyword[if] identifier[port] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[comport] = identifier[port]
keyword[try] :
identifier[self] . identifier[open_connection] ()
keyword[except] ( identifier[DutConnectionError] , identifier[ValueError] ) keyword[as] identifier[err] :
identifier[self] . identifier[close_dut] ( identifier[use_prepare] = keyword[False] )
keyword[raise] identifier[DutConnectionError] ( identifier[str] ( identifier[err] ))
keyword[except] identifier[KeyboardInterrupt] :
identifier[self] . identifier[close_dut] ( identifier[use_prepare] = keyword[False] )
identifier[self] . identifier[close_connection] ()
keyword[raise]
|
def open_dut(self, port=None):
"""
Open connection to dut.
:param port: com port to use.
:return:
"""
if port is not None:
self.comport = port # depends on [control=['if'], data=['port']]
try:
self.open_connection() # depends on [control=['try'], data=[]]
except (DutConnectionError, ValueError) as err:
self.close_dut(use_prepare=False)
raise DutConnectionError(str(err)) # depends on [control=['except'], data=['err']]
except KeyboardInterrupt:
self.close_dut(use_prepare=False)
self.close_connection()
raise # depends on [control=['except'], data=[]]
|
def run(self):
"""Main service entrypoint. Called via Thread.start() via PantsDaemon.run()."""
while not self._state.is_terminating:
self._maybe_garbage_collect()
self._maybe_extend_lease()
# Waiting with a timeout in maybe_pause has the effect of waiting until:
# 1) we are paused and then resumed
# 2) we are terminated (which will break the loop)
# 3) the timeout is reached, which will cause us to wake up and check gc/leases
self._state.maybe_pause(timeout=10)
|
def function[run, parameter[self]]:
constant[Main service entrypoint. Called via Thread.start() via PantsDaemon.run().]
while <ast.UnaryOp object at 0x7da1b2278340> begin[:]
call[name[self]._maybe_garbage_collect, parameter[]]
call[name[self]._maybe_extend_lease, parameter[]]
call[name[self]._state.maybe_pause, parameter[]]
|
keyword[def] identifier[run] ( identifier[self] ):
literal[string]
keyword[while] keyword[not] identifier[self] . identifier[_state] . identifier[is_terminating] :
identifier[self] . identifier[_maybe_garbage_collect] ()
identifier[self] . identifier[_maybe_extend_lease] ()
identifier[self] . identifier[_state] . identifier[maybe_pause] ( identifier[timeout] = literal[int] )
|
def run(self):
"""Main service entrypoint. Called via Thread.start() via PantsDaemon.run()."""
while not self._state.is_terminating:
self._maybe_garbage_collect()
self._maybe_extend_lease()
# Waiting with a timeout in maybe_pause has the effect of waiting until:
# 1) we are paused and then resumed
# 2) we are terminated (which will break the loop)
# 3) the timeout is reached, which will cause us to wake up and check gc/leases
self._state.maybe_pause(timeout=10) # depends on [control=['while'], data=[]]
|
def handleOneClientMsg(self, wrappedMsg):
"""
Validate and process a client message
:param wrappedMsg: a message from a client
"""
try:
vmsg = self.validateClientMsg(wrappedMsg)
if vmsg:
self.unpackClientMsg(*vmsg)
except BlowUp:
raise
except Exception as ex:
msg, frm = wrappedMsg
friendly = friendlyEx(ex)
if isinstance(ex, SuspiciousClient):
self.reportSuspiciousClient(frm, friendly)
self.handleInvalidClientMsg(ex, wrappedMsg)
|
def function[handleOneClientMsg, parameter[self, wrappedMsg]]:
constant[
Validate and process a client message
:param wrappedMsg: a message from a client
]
<ast.Try object at 0x7da204962320>
|
keyword[def] identifier[handleOneClientMsg] ( identifier[self] , identifier[wrappedMsg] ):
literal[string]
keyword[try] :
identifier[vmsg] = identifier[self] . identifier[validateClientMsg] ( identifier[wrappedMsg] )
keyword[if] identifier[vmsg] :
identifier[self] . identifier[unpackClientMsg] (* identifier[vmsg] )
keyword[except] identifier[BlowUp] :
keyword[raise]
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[msg] , identifier[frm] = identifier[wrappedMsg]
identifier[friendly] = identifier[friendlyEx] ( identifier[ex] )
keyword[if] identifier[isinstance] ( identifier[ex] , identifier[SuspiciousClient] ):
identifier[self] . identifier[reportSuspiciousClient] ( identifier[frm] , identifier[friendly] )
identifier[self] . identifier[handleInvalidClientMsg] ( identifier[ex] , identifier[wrappedMsg] )
|
def handleOneClientMsg(self, wrappedMsg):
"""
Validate and process a client message
:param wrappedMsg: a message from a client
"""
try:
vmsg = self.validateClientMsg(wrappedMsg)
if vmsg:
self.unpackClientMsg(*vmsg) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except BlowUp:
raise # depends on [control=['except'], data=[]]
except Exception as ex:
(msg, frm) = wrappedMsg
friendly = friendlyEx(ex)
if isinstance(ex, SuspiciousClient):
self.reportSuspiciousClient(frm, friendly) # depends on [control=['if'], data=[]]
self.handleInvalidClientMsg(ex, wrappedMsg) # depends on [control=['except'], data=['ex']]
|
def _plunge(tasks, pausing, finish):
"""
(internal) calls the next method of weaved tasks until they are finished
or The ``Plumber`` instance is stopped see: ``Dagger.chinkup``.
"""
# If no result received either not started or start & stop
# could have been called before the plunger thread
while True:
if pausing():
tasks.stop()
try:
tasks.next()
except StopIteration:
finish(pausing())
break
|
def function[_plunge, parameter[tasks, pausing, finish]]:
constant[
(internal) calls the next method of weaved tasks until they are finished
or The ``Plumber`` instance is stopped see: ``Dagger.chinkup``.
]
while constant[True] begin[:]
if call[name[pausing], parameter[]] begin[:]
call[name[tasks].stop, parameter[]]
<ast.Try object at 0x7da1b257dfc0>
|
keyword[def] identifier[_plunge] ( identifier[tasks] , identifier[pausing] , identifier[finish] ):
literal[string]
keyword[while] keyword[True] :
keyword[if] identifier[pausing] ():
identifier[tasks] . identifier[stop] ()
keyword[try] :
identifier[tasks] . identifier[next] ()
keyword[except] identifier[StopIteration] :
identifier[finish] ( identifier[pausing] ())
keyword[break]
|
def _plunge(tasks, pausing, finish):
"""
(internal) calls the next method of weaved tasks until they are finished
or The ``Plumber`` instance is stopped see: ``Dagger.chinkup``.
"""
# If no result received either not started or start & stop
# could have been called before the plunger thread
while True:
if pausing():
tasks.stop() # depends on [control=['if'], data=[]]
try:
tasks.next() # depends on [control=['try'], data=[]]
except StopIteration:
finish(pausing())
break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
|
def _check_dn(self, dn, attr_value):
"""Check dn attribute for issues."""
if dn is not None:
self._error('Two lines starting with dn: in one record.')
if not is_dn(attr_value):
self._error('No valid string-representation of '
'distinguished name %s.' % attr_value)
|
def function[_check_dn, parameter[self, dn, attr_value]]:
constant[Check dn attribute for issues.]
if compare[name[dn] is_not constant[None]] begin[:]
call[name[self]._error, parameter[constant[Two lines starting with dn: in one record.]]]
if <ast.UnaryOp object at 0x7da1b1b0d540> begin[:]
call[name[self]._error, parameter[binary_operation[constant[No valid string-representation of distinguished name %s.] <ast.Mod object at 0x7da2590d6920> name[attr_value]]]]
|
keyword[def] identifier[_check_dn] ( identifier[self] , identifier[dn] , identifier[attr_value] ):
literal[string]
keyword[if] identifier[dn] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_error] ( literal[string] )
keyword[if] keyword[not] identifier[is_dn] ( identifier[attr_value] ):
identifier[self] . identifier[_error] ( literal[string]
literal[string] % identifier[attr_value] )
|
def _check_dn(self, dn, attr_value):
"""Check dn attribute for issues."""
if dn is not None:
self._error('Two lines starting with dn: in one record.') # depends on [control=['if'], data=[]]
if not is_dn(attr_value):
self._error('No valid string-representation of distinguished name %s.' % attr_value) # depends on [control=['if'], data=[]]
|
def MatchBuildContext(self,
target_os,
target_arch,
target_package,
context=None):
"""Return true if target_platforms matches the supplied parameters.
Used by buildanddeploy to determine what clients need to be built.
Args:
target_os: which os we are building for in this run (linux, windows,
darwin)
target_arch: which arch we are building for in this run (i386, amd64)
target_package: which package type we are building (exe, dmg, deb, rpm)
context: config_lib context
Returns:
bool: True if target_platforms spec matches parameters.
"""
for spec in self.Get("ClientBuilder.target_platforms", context=context):
spec_os, arch, package_name = spec.split("_")
if (spec_os == target_os and arch == target_arch and
package_name == target_package):
return True
return False
|
def function[MatchBuildContext, parameter[self, target_os, target_arch, target_package, context]]:
constant[Return true if target_platforms matches the supplied parameters.
Used by buildanddeploy to determine what clients need to be built.
Args:
target_os: which os we are building for in this run (linux, windows,
darwin)
target_arch: which arch we are building for in this run (i386, amd64)
target_package: which package type we are building (exe, dmg, deb, rpm)
context: config_lib context
Returns:
bool: True if target_platforms spec matches parameters.
]
for taget[name[spec]] in starred[call[name[self].Get, parameter[constant[ClientBuilder.target_platforms]]]] begin[:]
<ast.Tuple object at 0x7da1b1c0d3c0> assign[=] call[name[spec].split, parameter[constant[_]]]
if <ast.BoolOp object at 0x7da1b1b47ee0> begin[:]
return[constant[True]]
return[constant[False]]
|
keyword[def] identifier[MatchBuildContext] ( identifier[self] ,
identifier[target_os] ,
identifier[target_arch] ,
identifier[target_package] ,
identifier[context] = keyword[None] ):
literal[string]
keyword[for] identifier[spec] keyword[in] identifier[self] . identifier[Get] ( literal[string] , identifier[context] = identifier[context] ):
identifier[spec_os] , identifier[arch] , identifier[package_name] = identifier[spec] . identifier[split] ( literal[string] )
keyword[if] ( identifier[spec_os] == identifier[target_os] keyword[and] identifier[arch] == identifier[target_arch] keyword[and]
identifier[package_name] == identifier[target_package] ):
keyword[return] keyword[True]
keyword[return] keyword[False]
|
def MatchBuildContext(self, target_os, target_arch, target_package, context=None):
"""Return true if target_platforms matches the supplied parameters.
Used by buildanddeploy to determine what clients need to be built.
Args:
target_os: which os we are building for in this run (linux, windows,
darwin)
target_arch: which arch we are building for in this run (i386, amd64)
target_package: which package type we are building (exe, dmg, deb, rpm)
context: config_lib context
Returns:
bool: True if target_platforms spec matches parameters.
"""
for spec in self.Get('ClientBuilder.target_platforms', context=context):
(spec_os, arch, package_name) = spec.split('_')
if spec_os == target_os and arch == target_arch and (package_name == target_package):
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['spec']]
return False
|
def set_mask(self, context_len, context):
"""
sets self.mask which is applied before softmax
ones for inactive context fields, zeros for active context fields
:param context_len: b
:param context: if batch_first: (b x t_k x n) else: (t_k x b x n)
self.mask: (b x t_k)
"""
if self.batch_first:
max_len = context.size(1)
else:
max_len = context.size(0)
indices = torch.arange(0, max_len, dtype=torch.int64,
device=context.device)
self.mask = indices >= (context_len.unsqueeze(1))
|
def function[set_mask, parameter[self, context_len, context]]:
constant[
sets self.mask which is applied before softmax
ones for inactive context fields, zeros for active context fields
:param context_len: b
:param context: if batch_first: (b x t_k x n) else: (t_k x b x n)
self.mask: (b x t_k)
]
if name[self].batch_first begin[:]
variable[max_len] assign[=] call[name[context].size, parameter[constant[1]]]
variable[indices] assign[=] call[name[torch].arange, parameter[constant[0], name[max_len]]]
name[self].mask assign[=] compare[name[indices] greater_or_equal[>=] call[name[context_len].unsqueeze, parameter[constant[1]]]]
|
keyword[def] identifier[set_mask] ( identifier[self] , identifier[context_len] , identifier[context] ):
literal[string]
keyword[if] identifier[self] . identifier[batch_first] :
identifier[max_len] = identifier[context] . identifier[size] ( literal[int] )
keyword[else] :
identifier[max_len] = identifier[context] . identifier[size] ( literal[int] )
identifier[indices] = identifier[torch] . identifier[arange] ( literal[int] , identifier[max_len] , identifier[dtype] = identifier[torch] . identifier[int64] ,
identifier[device] = identifier[context] . identifier[device] )
identifier[self] . identifier[mask] = identifier[indices] >=( identifier[context_len] . identifier[unsqueeze] ( literal[int] ))
|
def set_mask(self, context_len, context):
"""
sets self.mask which is applied before softmax
ones for inactive context fields, zeros for active context fields
:param context_len: b
:param context: if batch_first: (b x t_k x n) else: (t_k x b x n)
self.mask: (b x t_k)
"""
if self.batch_first:
max_len = context.size(1) # depends on [control=['if'], data=[]]
else:
max_len = context.size(0)
indices = torch.arange(0, max_len, dtype=torch.int64, device=context.device)
self.mask = indices >= context_len.unsqueeze(1)
|
def get_state(self, scaling_group):
"""
Returns the current state of the specified scaling group as a
dictionary.
"""
uri = "/%s/%s/state" % (self.uri_base, utils.get_id(scaling_group))
resp, resp_body = self.api.method_get(uri)
data = resp_body["group"]
ret = {}
ret["active"] = [itm["id"] for itm in data["active"]]
ret["active_capacity"] = data["activeCapacity"]
ret["desired_capacity"] = data["desiredCapacity"]
ret["pending_capacity"] = data["pendingCapacity"]
ret["paused"] = data["paused"]
return ret
|
def function[get_state, parameter[self, scaling_group]]:
constant[
Returns the current state of the specified scaling group as a
dictionary.
]
variable[uri] assign[=] binary_operation[constant[/%s/%s/state] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b056cf10>, <ast.Call object at 0x7da1b056e800>]]]
<ast.Tuple object at 0x7da1b056e140> assign[=] call[name[self].api.method_get, parameter[name[uri]]]
variable[data] assign[=] call[name[resp_body]][constant[group]]
variable[ret] assign[=] dictionary[[], []]
call[name[ret]][constant[active]] assign[=] <ast.ListComp object at 0x7da1b056d900>
call[name[ret]][constant[active_capacity]] assign[=] call[name[data]][constant[activeCapacity]]
call[name[ret]][constant[desired_capacity]] assign[=] call[name[data]][constant[desiredCapacity]]
call[name[ret]][constant[pending_capacity]] assign[=] call[name[data]][constant[pendingCapacity]]
call[name[ret]][constant[paused]] assign[=] call[name[data]][constant[paused]]
return[name[ret]]
|
keyword[def] identifier[get_state] ( identifier[self] , identifier[scaling_group] ):
literal[string]
identifier[uri] = literal[string] %( identifier[self] . identifier[uri_base] , identifier[utils] . identifier[get_id] ( identifier[scaling_group] ))
identifier[resp] , identifier[resp_body] = identifier[self] . identifier[api] . identifier[method_get] ( identifier[uri] )
identifier[data] = identifier[resp_body] [ literal[string] ]
identifier[ret] ={}
identifier[ret] [ literal[string] ]=[ identifier[itm] [ literal[string] ] keyword[for] identifier[itm] keyword[in] identifier[data] [ literal[string] ]]
identifier[ret] [ literal[string] ]= identifier[data] [ literal[string] ]
identifier[ret] [ literal[string] ]= identifier[data] [ literal[string] ]
identifier[ret] [ literal[string] ]= identifier[data] [ literal[string] ]
identifier[ret] [ literal[string] ]= identifier[data] [ literal[string] ]
keyword[return] identifier[ret]
|
def get_state(self, scaling_group):
"""
Returns the current state of the specified scaling group as a
dictionary.
"""
uri = '/%s/%s/state' % (self.uri_base, utils.get_id(scaling_group))
(resp, resp_body) = self.api.method_get(uri)
data = resp_body['group']
ret = {}
ret['active'] = [itm['id'] for itm in data['active']]
ret['active_capacity'] = data['activeCapacity']
ret['desired_capacity'] = data['desiredCapacity']
ret['pending_capacity'] = data['pendingCapacity']
ret['paused'] = data['paused']
return ret
|
def unused_port(hostname):
"""Return a port that is unused on the current host."""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind((hostname, 0))
return s.getsockname()[1]
|
def function[unused_port, parameter[hostname]]:
constant[Return a port that is unused on the current host.]
with call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_STREAM]] begin[:]
call[name[s].bind, parameter[tuple[[<ast.Name object at 0x7da1b0791ab0>, <ast.Constant object at 0x7da1b07901c0>]]]]
return[call[call[name[s].getsockname, parameter[]]][constant[1]]]
|
keyword[def] identifier[unused_port] ( identifier[hostname] ):
literal[string]
keyword[with] identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_STREAM] ) keyword[as] identifier[s] :
identifier[s] . identifier[bind] (( identifier[hostname] , literal[int] ))
keyword[return] identifier[s] . identifier[getsockname] ()[ literal[int] ]
|
def unused_port(hostname):
"""Return a port that is unused on the current host."""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind((hostname, 0))
return s.getsockname()[1] # depends on [control=['with'], data=['s']]
|
def extract_endpoint_arguments(endpoint):
"""Extract the argument documentation from the endpoint."""
ep_args = endpoint._arguments
if ep_args is None:
return None
arg_docs = { k: format_endpoint_argument_doc(a) \
for k, a in ep_args.iteritems() }
return arg_docs
|
def function[extract_endpoint_arguments, parameter[endpoint]]:
constant[Extract the argument documentation from the endpoint.]
variable[ep_args] assign[=] name[endpoint]._arguments
if compare[name[ep_args] is constant[None]] begin[:]
return[constant[None]]
variable[arg_docs] assign[=] <ast.DictComp object at 0x7da18ede7160>
return[name[arg_docs]]
|
keyword[def] identifier[extract_endpoint_arguments] ( identifier[endpoint] ):
literal[string]
identifier[ep_args] = identifier[endpoint] . identifier[_arguments]
keyword[if] identifier[ep_args] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[arg_docs] ={ identifier[k] : identifier[format_endpoint_argument_doc] ( identifier[a] ) keyword[for] identifier[k] , identifier[a] keyword[in] identifier[ep_args] . identifier[iteritems] ()}
keyword[return] identifier[arg_docs]
|
def extract_endpoint_arguments(endpoint):
"""Extract the argument documentation from the endpoint."""
ep_args = endpoint._arguments
if ep_args is None:
return None # depends on [control=['if'], data=[]]
arg_docs = {k: format_endpoint_argument_doc(a) for (k, a) in ep_args.iteritems()}
return arg_docs
|
def _find_convertable_object(self, data):
"""
Get the first instance of a `self.pod_types`
"""
data = list(data)
convertable_object_idxs = [
idx
for idx, obj
in enumerate(data)
if obj.get('kind') in self.pod_types.keys()
]
if len(convertable_object_idxs) < 1:
raise Exception("Kubernetes config didn't contain any of {}".format(
', '.join(self.pod_types.keys())
))
return list(data)[convertable_object_idxs[0]]
|
def function[_find_convertable_object, parameter[self, data]]:
constant[
Get the first instance of a `self.pod_types`
]
variable[data] assign[=] call[name[list], parameter[name[data]]]
variable[convertable_object_idxs] assign[=] <ast.ListComp object at 0x7da2041d8cd0>
if compare[call[name[len], parameter[name[convertable_object_idxs]]] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da2041da2c0>
return[call[call[name[list], parameter[name[data]]]][call[name[convertable_object_idxs]][constant[0]]]]
|
keyword[def] identifier[_find_convertable_object] ( identifier[self] , identifier[data] ):
literal[string]
identifier[data] = identifier[list] ( identifier[data] )
identifier[convertable_object_idxs] =[
identifier[idx]
keyword[for] identifier[idx] , identifier[obj]
keyword[in] identifier[enumerate] ( identifier[data] )
keyword[if] identifier[obj] . identifier[get] ( literal[string] ) keyword[in] identifier[self] . identifier[pod_types] . identifier[keys] ()
]
keyword[if] identifier[len] ( identifier[convertable_object_idxs] )< literal[int] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[self] . identifier[pod_types] . identifier[keys] ())
))
keyword[return] identifier[list] ( identifier[data] )[ identifier[convertable_object_idxs] [ literal[int] ]]
|
def _find_convertable_object(self, data):
"""
Get the first instance of a `self.pod_types`
"""
data = list(data)
convertable_object_idxs = [idx for (idx, obj) in enumerate(data) if obj.get('kind') in self.pod_types.keys()]
if len(convertable_object_idxs) < 1:
raise Exception("Kubernetes config didn't contain any of {}".format(', '.join(self.pod_types.keys()))) # depends on [control=['if'], data=[]]
return list(data)[convertable_object_idxs[0]]
|
def __parse_tostr(self, text, **kwargs):
'''Builds and returns the MeCab function for parsing Unicode text.
Args:
fn_name: MeCab function name that determines the function
behavior, either 'mecab_sparse_tostr' or
'mecab_nbest_sparse_tostr'.
Returns:
A function definition, tailored to parsing Unicode text and
returning the result as a string suitable for display on stdout,
using either the default or N-best behavior.
'''
n = self.options.get('nbest', 1)
if self._KW_BOUNDARY in kwargs:
patt = kwargs.get(self._KW_BOUNDARY, '.')
tokens = list(self.__split_pattern(text, patt))
text = ''.join([t[0] for t in tokens])
btext = self.__str2bytes(text)
self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
bpos = 0
self.__mecab.mecab_lattice_set_boundary_constraint(
self.lattice, bpos, self.MECAB_TOKEN_BOUNDARY)
for (token, match) in tokens:
bpos += 1
if match:
mark = self.MECAB_INSIDE_TOKEN
else:
mark = self.MECAB_ANY_BOUNDARY
for _ in range(1, len(self.__str2bytes(token))):
self.__mecab.mecab_lattice_set_boundary_constraint(
self.lattice, bpos, mark)
bpos += 1
self.__mecab.mecab_lattice_set_boundary_constraint(
self.lattice, bpos, self.MECAB_TOKEN_BOUNDARY)
elif self._KW_FEATURE in kwargs:
features = kwargs.get(self._KW_FEATURE, ())
fd = {morph: self.__str2bytes(feat) for morph, feat in features}
tokens = self.__split_features(text, [e[0] for e in features])
text = ''.join([t[0] for t in tokens])
btext = self.__str2bytes(text)
self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
bpos = 0
for chunk, match in tokens:
c = len(self.__str2bytes(chunk))
if match == True:
self.__mecab.mecab_lattice_set_feature_constraint(
self.lattice, bpos, bpos+c, fd[chunk])
bpos += c
else:
btext = self.__str2bytes(text)
self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
self.__mecab.mecab_parse_lattice(self.tagger, self.lattice)
if n > 1:
res = self.__mecab.mecab_lattice_nbest_tostr(self.lattice, n)
else:
res = self.__mecab.mecab_lattice_tostr(self.lattice)
if res != self.__ffi.NULL:
raw = self.__ffi.string(res)
return self.__bytes2str(raw).strip()
else:
err = self.__mecab.mecab_lattice_strerror(self.lattice)
logger.error(self.__bytes2str(self.__ffi.string(err)))
raise MeCabError(self.__bytes2str(self.__ffi.string(err)))
|
def function[__parse_tostr, parameter[self, text]]:
constant[Builds and returns the MeCab function for parsing Unicode text.
Args:
fn_name: MeCab function name that determines the function
behavior, either 'mecab_sparse_tostr' or
'mecab_nbest_sparse_tostr'.
Returns:
A function definition, tailored to parsing Unicode text and
returning the result as a string suitable for display on stdout,
using either the default or N-best behavior.
]
variable[n] assign[=] call[name[self].options.get, parameter[constant[nbest], constant[1]]]
if compare[name[self]._KW_BOUNDARY in name[kwargs]] begin[:]
variable[patt] assign[=] call[name[kwargs].get, parameter[name[self]._KW_BOUNDARY, constant[.]]]
variable[tokens] assign[=] call[name[list], parameter[call[name[self].__split_pattern, parameter[name[text], name[patt]]]]]
variable[text] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da1b0fb8d60>]]
variable[btext] assign[=] call[name[self].__str2bytes, parameter[name[text]]]
call[name[self].__mecab.mecab_lattice_set_sentence, parameter[name[self].lattice, name[btext]]]
variable[bpos] assign[=] constant[0]
call[name[self].__mecab.mecab_lattice_set_boundary_constraint, parameter[name[self].lattice, name[bpos], name[self].MECAB_TOKEN_BOUNDARY]]
for taget[tuple[[<ast.Name object at 0x7da1b0fba740>, <ast.Name object at 0x7da1b0fbb070>]]] in starred[name[tokens]] begin[:]
<ast.AugAssign object at 0x7da1b0fbb5e0>
if name[match] begin[:]
variable[mark] assign[=] name[self].MECAB_INSIDE_TOKEN
for taget[name[_]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[call[name[self].__str2bytes, parameter[name[token]]]]]]]] begin[:]
call[name[self].__mecab.mecab_lattice_set_boundary_constraint, parameter[name[self].lattice, name[bpos], name[mark]]]
<ast.AugAssign object at 0x7da1b0fb8910>
call[name[self].__mecab.mecab_lattice_set_boundary_constraint, parameter[name[self].lattice, name[bpos], name[self].MECAB_TOKEN_BOUNDARY]]
call[name[self].__mecab.mecab_parse_lattice, parameter[name[self].tagger, name[self].lattice]]
if compare[name[n] greater[>] constant[1]] begin[:]
variable[res] assign[=] call[name[self].__mecab.mecab_lattice_nbest_tostr, parameter[name[self].lattice, name[n]]]
if compare[name[res] not_equal[!=] name[self].__ffi.NULL] begin[:]
variable[raw] assign[=] call[name[self].__ffi.string, parameter[name[res]]]
return[call[call[name[self].__bytes2str, parameter[name[raw]]].strip, parameter[]]]
|
keyword[def] identifier[__parse_tostr] ( identifier[self] , identifier[text] ,** identifier[kwargs] ):
literal[string]
identifier[n] = identifier[self] . identifier[options] . identifier[get] ( literal[string] , literal[int] )
keyword[if] identifier[self] . identifier[_KW_BOUNDARY] keyword[in] identifier[kwargs] :
identifier[patt] = identifier[kwargs] . identifier[get] ( identifier[self] . identifier[_KW_BOUNDARY] , literal[string] )
identifier[tokens] = identifier[list] ( identifier[self] . identifier[__split_pattern] ( identifier[text] , identifier[patt] ))
identifier[text] = literal[string] . identifier[join] ([ identifier[t] [ literal[int] ] keyword[for] identifier[t] keyword[in] identifier[tokens] ])
identifier[btext] = identifier[self] . identifier[__str2bytes] ( identifier[text] )
identifier[self] . identifier[__mecab] . identifier[mecab_lattice_set_sentence] ( identifier[self] . identifier[lattice] , identifier[btext] )
identifier[bpos] = literal[int]
identifier[self] . identifier[__mecab] . identifier[mecab_lattice_set_boundary_constraint] (
identifier[self] . identifier[lattice] , identifier[bpos] , identifier[self] . identifier[MECAB_TOKEN_BOUNDARY] )
keyword[for] ( identifier[token] , identifier[match] ) keyword[in] identifier[tokens] :
identifier[bpos] += literal[int]
keyword[if] identifier[match] :
identifier[mark] = identifier[self] . identifier[MECAB_INSIDE_TOKEN]
keyword[else] :
identifier[mark] = identifier[self] . identifier[MECAB_ANY_BOUNDARY]
keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[self] . identifier[__str2bytes] ( identifier[token] ))):
identifier[self] . identifier[__mecab] . identifier[mecab_lattice_set_boundary_constraint] (
identifier[self] . identifier[lattice] , identifier[bpos] , identifier[mark] )
identifier[bpos] += literal[int]
identifier[self] . identifier[__mecab] . identifier[mecab_lattice_set_boundary_constraint] (
identifier[self] . identifier[lattice] , identifier[bpos] , identifier[self] . identifier[MECAB_TOKEN_BOUNDARY] )
keyword[elif] identifier[self] . identifier[_KW_FEATURE] keyword[in] identifier[kwargs] :
identifier[features] = identifier[kwargs] . identifier[get] ( identifier[self] . identifier[_KW_FEATURE] ,())
identifier[fd] ={ identifier[morph] : identifier[self] . identifier[__str2bytes] ( identifier[feat] ) keyword[for] identifier[morph] , identifier[feat] keyword[in] identifier[features] }
identifier[tokens] = identifier[self] . identifier[__split_features] ( identifier[text] ,[ identifier[e] [ literal[int] ] keyword[for] identifier[e] keyword[in] identifier[features] ])
identifier[text] = literal[string] . identifier[join] ([ identifier[t] [ literal[int] ] keyword[for] identifier[t] keyword[in] identifier[tokens] ])
identifier[btext] = identifier[self] . identifier[__str2bytes] ( identifier[text] )
identifier[self] . identifier[__mecab] . identifier[mecab_lattice_set_sentence] ( identifier[self] . identifier[lattice] , identifier[btext] )
identifier[bpos] = literal[int]
keyword[for] identifier[chunk] , identifier[match] keyword[in] identifier[tokens] :
identifier[c] = identifier[len] ( identifier[self] . identifier[__str2bytes] ( identifier[chunk] ))
keyword[if] identifier[match] == keyword[True] :
identifier[self] . identifier[__mecab] . identifier[mecab_lattice_set_feature_constraint] (
identifier[self] . identifier[lattice] , identifier[bpos] , identifier[bpos] + identifier[c] , identifier[fd] [ identifier[chunk] ])
identifier[bpos] += identifier[c]
keyword[else] :
identifier[btext] = identifier[self] . identifier[__str2bytes] ( identifier[text] )
identifier[self] . identifier[__mecab] . identifier[mecab_lattice_set_sentence] ( identifier[self] . identifier[lattice] , identifier[btext] )
identifier[self] . identifier[__mecab] . identifier[mecab_parse_lattice] ( identifier[self] . identifier[tagger] , identifier[self] . identifier[lattice] )
keyword[if] identifier[n] > literal[int] :
identifier[res] = identifier[self] . identifier[__mecab] . identifier[mecab_lattice_nbest_tostr] ( identifier[self] . identifier[lattice] , identifier[n] )
keyword[else] :
identifier[res] = identifier[self] . identifier[__mecab] . identifier[mecab_lattice_tostr] ( identifier[self] . identifier[lattice] )
keyword[if] identifier[res] != identifier[self] . identifier[__ffi] . identifier[NULL] :
identifier[raw] = identifier[self] . identifier[__ffi] . identifier[string] ( identifier[res] )
keyword[return] identifier[self] . identifier[__bytes2str] ( identifier[raw] ). identifier[strip] ()
keyword[else] :
identifier[err] = identifier[self] . identifier[__mecab] . identifier[mecab_lattice_strerror] ( identifier[self] . identifier[lattice] )
identifier[logger] . identifier[error] ( identifier[self] . identifier[__bytes2str] ( identifier[self] . identifier[__ffi] . identifier[string] ( identifier[err] )))
keyword[raise] identifier[MeCabError] ( identifier[self] . identifier[__bytes2str] ( identifier[self] . identifier[__ffi] . identifier[string] ( identifier[err] )))
|
def __parse_tostr(self, text, **kwargs):
"""Builds and returns the MeCab function for parsing Unicode text.
Args:
fn_name: MeCab function name that determines the function
behavior, either 'mecab_sparse_tostr' or
'mecab_nbest_sparse_tostr'.
Returns:
A function definition, tailored to parsing Unicode text and
returning the result as a string suitable for display on stdout,
using either the default or N-best behavior.
"""
n = self.options.get('nbest', 1)
if self._KW_BOUNDARY in kwargs:
patt = kwargs.get(self._KW_BOUNDARY, '.')
tokens = list(self.__split_pattern(text, patt))
text = ''.join([t[0] for t in tokens])
btext = self.__str2bytes(text)
self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
bpos = 0
self.__mecab.mecab_lattice_set_boundary_constraint(self.lattice, bpos, self.MECAB_TOKEN_BOUNDARY)
for (token, match) in tokens:
bpos += 1
if match:
mark = self.MECAB_INSIDE_TOKEN # depends on [control=['if'], data=[]]
else:
mark = self.MECAB_ANY_BOUNDARY
for _ in range(1, len(self.__str2bytes(token))):
self.__mecab.mecab_lattice_set_boundary_constraint(self.lattice, bpos, mark)
bpos += 1 # depends on [control=['for'], data=[]]
self.__mecab.mecab_lattice_set_boundary_constraint(self.lattice, bpos, self.MECAB_TOKEN_BOUNDARY) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['kwargs']]
elif self._KW_FEATURE in kwargs:
features = kwargs.get(self._KW_FEATURE, ())
fd = {morph: self.__str2bytes(feat) for (morph, feat) in features}
tokens = self.__split_features(text, [e[0] for e in features])
text = ''.join([t[0] for t in tokens])
btext = self.__str2bytes(text)
self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
bpos = 0
for (chunk, match) in tokens:
c = len(self.__str2bytes(chunk))
if match == True:
self.__mecab.mecab_lattice_set_feature_constraint(self.lattice, bpos, bpos + c, fd[chunk]) # depends on [control=['if'], data=[]]
bpos += c # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['kwargs']]
else:
btext = self.__str2bytes(text)
self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
self.__mecab.mecab_parse_lattice(self.tagger, self.lattice)
if n > 1:
res = self.__mecab.mecab_lattice_nbest_tostr(self.lattice, n) # depends on [control=['if'], data=['n']]
else:
res = self.__mecab.mecab_lattice_tostr(self.lattice)
if res != self.__ffi.NULL:
raw = self.__ffi.string(res)
return self.__bytes2str(raw).strip() # depends on [control=['if'], data=['res']]
else:
err = self.__mecab.mecab_lattice_strerror(self.lattice)
logger.error(self.__bytes2str(self.__ffi.string(err)))
raise MeCabError(self.__bytes2str(self.__ffi.string(err)))
|
def debug_callback(event, *args, **kwds):
'''Example callback, useful for debugging.
'''
l = ['event %s' % (event.type,)]
if args:
l.extend(map(str, args))
if kwds:
l.extend(sorted('%s=%s' % t for t in kwds.items()))
print('Debug callback (%s)' % ', '.join(l))
|
def function[debug_callback, parameter[event]]:
constant[Example callback, useful for debugging.
]
variable[l] assign[=] list[[<ast.BinOp object at 0x7da1b162b5b0>]]
if name[args] begin[:]
call[name[l].extend, parameter[call[name[map], parameter[name[str], name[args]]]]]
if name[kwds] begin[:]
call[name[l].extend, parameter[call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da1b1629270>]]]]
call[name[print], parameter[binary_operation[constant[Debug callback (%s)] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[l]]]]]]
|
keyword[def] identifier[debug_callback] ( identifier[event] ,* identifier[args] ,** identifier[kwds] ):
literal[string]
identifier[l] =[ literal[string] %( identifier[event] . identifier[type] ,)]
keyword[if] identifier[args] :
identifier[l] . identifier[extend] ( identifier[map] ( identifier[str] , identifier[args] ))
keyword[if] identifier[kwds] :
identifier[l] . identifier[extend] ( identifier[sorted] ( literal[string] % identifier[t] keyword[for] identifier[t] keyword[in] identifier[kwds] . identifier[items] ()))
identifier[print] ( literal[string] % literal[string] . identifier[join] ( identifier[l] ))
|
def debug_callback(event, *args, **kwds):
"""Example callback, useful for debugging.
"""
l = ['event %s' % (event.type,)]
if args:
l.extend(map(str, args)) # depends on [control=['if'], data=[]]
if kwds:
l.extend(sorted(('%s=%s' % t for t in kwds.items()))) # depends on [control=['if'], data=[]]
print('Debug callback (%s)' % ', '.join(l))
|
def _get_cursor(self, n_retries=1):
"""Returns a context manager for obtained from a single or pooled
connection, and sets the PostgreSQL search_path to the schema
specified in the connection URL.
Although *connections* are threadsafe, *cursors* are bound to
connections and are *not* threadsafe. Do not share cursors
across threads.
Use this funciton like this::
with hdp._get_cursor() as cur:
# your code
Do not call this function outside a contextmanager.
"""
n_tries_rem = n_retries + 1
while n_tries_rem > 0:
try:
conn = self._pool.getconn() if self.pooling else self._conn
# autocommit=True obviates closing explicitly
conn.autocommit = True
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute("set search_path = {self.url.schema};".format(self=self))
yield cur
# contextmanager executes these when context exits
cur.close()
if self.pooling:
self._pool.putconn(conn)
break
except psycopg2.OperationalError:
_logger.warning(
"Lost connection to {url}; attempting reconnect".format(url=self.url))
if self.pooling:
self._pool.closeall()
self._connect()
_logger.warning("Reconnected to {url}".format(url=self.url))
n_tries_rem -= 1
else:
# N.B. Probably never reached
raise HGVSError("Permanently lost connection to {url} ({n} retries)".format(
url=self.url, n=n_retries))
|
def function[_get_cursor, parameter[self, n_retries]]:
constant[Returns a context manager for obtained from a single or pooled
connection, and sets the PostgreSQL search_path to the schema
specified in the connection URL.
Although *connections* are threadsafe, *cursors* are bound to
connections and are *not* threadsafe. Do not share cursors
across threads.
Use this funciton like this::
with hdp._get_cursor() as cur:
# your code
Do not call this function outside a contextmanager.
]
variable[n_tries_rem] assign[=] binary_operation[name[n_retries] + constant[1]]
while compare[name[n_tries_rem] greater[>] constant[0]] begin[:]
<ast.Try object at 0x7da1b20683d0>
<ast.AugAssign object at 0x7da1b2069f00>
|
keyword[def] identifier[_get_cursor] ( identifier[self] , identifier[n_retries] = literal[int] ):
literal[string]
identifier[n_tries_rem] = identifier[n_retries] + literal[int]
keyword[while] identifier[n_tries_rem] > literal[int] :
keyword[try] :
identifier[conn] = identifier[self] . identifier[_pool] . identifier[getconn] () keyword[if] identifier[self] . identifier[pooling] keyword[else] identifier[self] . identifier[_conn]
identifier[conn] . identifier[autocommit] = keyword[True]
identifier[cur] = identifier[conn] . identifier[cursor] ( identifier[cursor_factory] = identifier[psycopg2] . identifier[extras] . identifier[DictCursor] )
identifier[cur] . identifier[execute] ( literal[string] . identifier[format] ( identifier[self] = identifier[self] ))
keyword[yield] identifier[cur]
identifier[cur] . identifier[close] ()
keyword[if] identifier[self] . identifier[pooling] :
identifier[self] . identifier[_pool] . identifier[putconn] ( identifier[conn] )
keyword[break]
keyword[except] identifier[psycopg2] . identifier[OperationalError] :
identifier[_logger] . identifier[warning] (
literal[string] . identifier[format] ( identifier[url] = identifier[self] . identifier[url] ))
keyword[if] identifier[self] . identifier[pooling] :
identifier[self] . identifier[_pool] . identifier[closeall] ()
identifier[self] . identifier[_connect] ()
identifier[_logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[url] = identifier[self] . identifier[url] ))
identifier[n_tries_rem] -= literal[int]
keyword[else] :
keyword[raise] identifier[HGVSError] ( literal[string] . identifier[format] (
identifier[url] = identifier[self] . identifier[url] , identifier[n] = identifier[n_retries] ))
|
def _get_cursor(self, n_retries=1):
"""Returns a context manager for obtained from a single or pooled
connection, and sets the PostgreSQL search_path to the schema
specified in the connection URL.
Although *connections* are threadsafe, *cursors* are bound to
connections and are *not* threadsafe. Do not share cursors
across threads.
Use this funciton like this::
with hdp._get_cursor() as cur:
# your code
Do not call this function outside a contextmanager.
"""
n_tries_rem = n_retries + 1
while n_tries_rem > 0:
try:
conn = self._pool.getconn() if self.pooling else self._conn
# autocommit=True obviates closing explicitly
conn.autocommit = True
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute('set search_path = {self.url.schema};'.format(self=self))
yield cur
# contextmanager executes these when context exits
cur.close()
if self.pooling:
self._pool.putconn(conn) # depends on [control=['if'], data=[]]
break # depends on [control=['try'], data=[]]
except psycopg2.OperationalError:
_logger.warning('Lost connection to {url}; attempting reconnect'.format(url=self.url))
if self.pooling:
self._pool.closeall() # depends on [control=['if'], data=[]]
self._connect()
_logger.warning('Reconnected to {url}'.format(url=self.url)) # depends on [control=['except'], data=[]]
n_tries_rem -= 1 # depends on [control=['while'], data=['n_tries_rem']]
else:
# N.B. Probably never reached
raise HGVSError('Permanently lost connection to {url} ({n} retries)'.format(url=self.url, n=n_retries))
|
def attribute(path, name):
"""Returns the two numbers found behind --[A-Z] in path. If several matches
are found, the last one is returned.
Parameters
----------
path : string
String with path of file/folder to get attribute from.
name : string
Name of attribute to get. Should be A-Z or a-z (implicit converted to
uppercase).
Returns
-------
integer
Returns number found in path behind --name as an integer.
"""
matches = re.findall('--' + name.upper() + '([0-9]{2})', path)
if matches:
return int(matches[-1])
else:
return None
|
def function[attribute, parameter[path, name]]:
constant[Returns the two numbers found behind --[A-Z] in path. If several matches
are found, the last one is returned.
Parameters
----------
path : string
String with path of file/folder to get attribute from.
name : string
Name of attribute to get. Should be A-Z or a-z (implicit converted to
uppercase).
Returns
-------
integer
Returns number found in path behind --name as an integer.
]
variable[matches] assign[=] call[name[re].findall, parameter[binary_operation[binary_operation[constant[--] + call[name[name].upper, parameter[]]] + constant[([0-9]{2})]], name[path]]]
if name[matches] begin[:]
return[call[name[int], parameter[call[name[matches]][<ast.UnaryOp object at 0x7da1b008cc70>]]]]
|
keyword[def] identifier[attribute] ( identifier[path] , identifier[name] ):
literal[string]
identifier[matches] = identifier[re] . identifier[findall] ( literal[string] + identifier[name] . identifier[upper] ()+ literal[string] , identifier[path] )
keyword[if] identifier[matches] :
keyword[return] identifier[int] ( identifier[matches] [- literal[int] ])
keyword[else] :
keyword[return] keyword[None]
|
def attribute(path, name):
"""Returns the two numbers found behind --[A-Z] in path. If several matches
are found, the last one is returned.
Parameters
----------
path : string
String with path of file/folder to get attribute from.
name : string
Name of attribute to get. Should be A-Z or a-z (implicit converted to
uppercase).
Returns
-------
integer
Returns number found in path behind --name as an integer.
"""
matches = re.findall('--' + name.upper() + '([0-9]{2})', path)
if matches:
return int(matches[-1]) # depends on [control=['if'], data=[]]
else:
return None
|
def get_cg_volumes(self, group_id):
""" return all non snapshots volumes in cg """
for volume in self.xcli_client.cmd.vol_list(cg=group_id):
if volume.snapshot_of == '':
yield volume.name
|
def function[get_cg_volumes, parameter[self, group_id]]:
constant[ return all non snapshots volumes in cg ]
for taget[name[volume]] in starred[call[name[self].xcli_client.cmd.vol_list, parameter[]]] begin[:]
if compare[name[volume].snapshot_of equal[==] constant[]] begin[:]
<ast.Yield object at 0x7da1b1909ae0>
|
keyword[def] identifier[get_cg_volumes] ( identifier[self] , identifier[group_id] ):
literal[string]
keyword[for] identifier[volume] keyword[in] identifier[self] . identifier[xcli_client] . identifier[cmd] . identifier[vol_list] ( identifier[cg] = identifier[group_id] ):
keyword[if] identifier[volume] . identifier[snapshot_of] == literal[string] :
keyword[yield] identifier[volume] . identifier[name]
|
def get_cg_volumes(self, group_id):
""" return all non snapshots volumes in cg """
for volume in self.xcli_client.cmd.vol_list(cg=group_id):
if volume.snapshot_of == '':
yield volume.name # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['volume']]
|
def fsroot(self):
"""Returns the file system root."""
if self.osname == 'windows':
return '{}:\\'.format(
self._handler.inspect_get_drive_mappings(self._root)[0][0])
else:
return self._handler.inspect_get_mountpoints(self._root)[0][0]
|
def function[fsroot, parameter[self]]:
constant[Returns the file system root.]
if compare[name[self].osname equal[==] constant[windows]] begin[:]
return[call[constant[{}:\].format, parameter[call[call[call[name[self]._handler.inspect_get_drive_mappings, parameter[name[self]._root]]][constant[0]]][constant[0]]]]]
|
keyword[def] identifier[fsroot] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[osname] == literal[string] :
keyword[return] literal[string] . identifier[format] (
identifier[self] . identifier[_handler] . identifier[inspect_get_drive_mappings] ( identifier[self] . identifier[_root] )[ literal[int] ][ literal[int] ])
keyword[else] :
keyword[return] identifier[self] . identifier[_handler] . identifier[inspect_get_mountpoints] ( identifier[self] . identifier[_root] )[ literal[int] ][ literal[int] ]
|
def fsroot(self):
"""Returns the file system root."""
if self.osname == 'windows':
return '{}:\\'.format(self._handler.inspect_get_drive_mappings(self._root)[0][0]) # depends on [control=['if'], data=[]]
else:
return self._handler.inspect_get_mountpoints(self._root)[0][0]
|
def plot_decision_boundary(model, X, y, step=0.1, figsize=(10, 8), alpha=0.4, size=20):
"""Plots the classification decision boundary of `model` on `X` with labels `y`.
Using numpy and matplotlib.
"""
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, step),
np.arange(y_min, y_max, step))
f, ax = plt.subplots(figsize=figsize)
Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
ax.contourf(xx, yy, Z, alpha=alpha)
ax.scatter(X[:, 0], X[:, 1], c=y, s=size, edgecolor='k')
plt.show()
|
def function[plot_decision_boundary, parameter[model, X, y, step, figsize, alpha, size]]:
constant[Plots the classification decision boundary of `model` on `X` with labels `y`.
Using numpy and matplotlib.
]
<ast.Tuple object at 0x7da1b1e091e0> assign[=] tuple[[<ast.BinOp object at 0x7da1b1e08c40>, <ast.BinOp object at 0x7da1b1e0a0b0>]]
<ast.Tuple object at 0x7da1b1e099c0> assign[=] tuple[[<ast.BinOp object at 0x7da1b1e0a050>, <ast.BinOp object at 0x7da1b1e0a770>]]
<ast.Tuple object at 0x7da1b1e0b910> assign[=] call[name[np].meshgrid, parameter[call[name[np].arange, parameter[name[x_min], name[x_max], name[step]]], call[name[np].arange, parameter[name[y_min], name[y_max], name[step]]]]]
<ast.Tuple object at 0x7da1b1e08070> assign[=] call[name[plt].subplots, parameter[]]
variable[Z] assign[=] call[name[model].predict, parameter[call[name[np].c_][tuple[[<ast.Call object at 0x7da1b1ff1de0>, <ast.Call object at 0x7da1b1ff28f0>]]]]]
variable[Z] assign[=] call[name[Z].reshape, parameter[name[xx].shape]]
call[name[ax].contourf, parameter[name[xx], name[yy], name[Z]]]
call[name[ax].scatter, parameter[call[name[X]][tuple[[<ast.Slice object at 0x7da1b1ff0070>, <ast.Constant object at 0x7da1b1ff0190>]]], call[name[X]][tuple[[<ast.Slice object at 0x7da1b1ff0130>, <ast.Constant object at 0x7da1b1ff1390>]]]]]
call[name[plt].show, parameter[]]
|
keyword[def] identifier[plot_decision_boundary] ( identifier[model] , identifier[X] , identifier[y] , identifier[step] = literal[int] , identifier[figsize] =( literal[int] , literal[int] ), identifier[alpha] = literal[int] , identifier[size] = literal[int] ):
literal[string]
identifier[x_min] , identifier[x_max] = identifier[X] [:, literal[int] ]. identifier[min] ()- literal[int] , identifier[X] [:, literal[int] ]. identifier[max] ()+ literal[int]
identifier[y_min] , identifier[y_max] = identifier[X] [:, literal[int] ]. identifier[min] ()- literal[int] , identifier[X] [:, literal[int] ]. identifier[max] ()+ literal[int]
identifier[xx] , identifier[yy] = identifier[np] . identifier[meshgrid] ( identifier[np] . identifier[arange] ( identifier[x_min] , identifier[x_max] , identifier[step] ),
identifier[np] . identifier[arange] ( identifier[y_min] , identifier[y_max] , identifier[step] ))
identifier[f] , identifier[ax] = identifier[plt] . identifier[subplots] ( identifier[figsize] = identifier[figsize] )
identifier[Z] = identifier[model] . identifier[predict] ( identifier[np] . identifier[c_] [ identifier[xx] . identifier[ravel] (), identifier[yy] . identifier[ravel] ()])
identifier[Z] = identifier[Z] . identifier[reshape] ( identifier[xx] . identifier[shape] )
identifier[ax] . identifier[contourf] ( identifier[xx] , identifier[yy] , identifier[Z] , identifier[alpha] = identifier[alpha] )
identifier[ax] . identifier[scatter] ( identifier[X] [:, literal[int] ], identifier[X] [:, literal[int] ], identifier[c] = identifier[y] , identifier[s] = identifier[size] , identifier[edgecolor] = literal[string] )
identifier[plt] . identifier[show] ()
|
def plot_decision_boundary(model, X, y, step=0.1, figsize=(10, 8), alpha=0.4, size=20):
"""Plots the classification decision boundary of `model` on `X` with labels `y`.
Using numpy and matplotlib.
"""
(x_min, x_max) = (X[:, 0].min() - 1, X[:, 0].max() + 1)
(y_min, y_max) = (X[:, 1].min() - 1, X[:, 1].max() + 1)
(xx, yy) = np.meshgrid(np.arange(x_min, x_max, step), np.arange(y_min, y_max, step))
(f, ax) = plt.subplots(figsize=figsize)
Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
ax.contourf(xx, yy, Z, alpha=alpha)
ax.scatter(X[:, 0], X[:, 1], c=y, s=size, edgecolor='k')
plt.show()
|
def search_domain(self, searchterm):
"""Search for domains
:type searchterm: str
:rtype: list
"""
return self.__search(type_attribute=self.__mispdomaintypes(), value=searchterm)
|
def function[search_domain, parameter[self, searchterm]]:
constant[Search for domains
:type searchterm: str
:rtype: list
]
return[call[name[self].__search, parameter[]]]
|
keyword[def] identifier[search_domain] ( identifier[self] , identifier[searchterm] ):
literal[string]
keyword[return] identifier[self] . identifier[__search] ( identifier[type_attribute] = identifier[self] . identifier[__mispdomaintypes] (), identifier[value] = identifier[searchterm] )
|
def search_domain(self, searchterm):
"""Search for domains
:type searchterm: str
:rtype: list
"""
return self.__search(type_attribute=self.__mispdomaintypes(), value=searchterm)
|
def patSiemensStar(s0, n=72, vhigh=255, vlow=0, antiasing=False):
'''make line pattern'''
arr = np.full((s0,s0),vlow, dtype=np.uint8)
c = int(round(s0/2.))
s = 2*np.pi/(2*n)
step = 0
for i in range(2*n):
p0 = round(c+np.sin(step)*2*s0)
p1 = round(c+np.cos(step)*2*s0)
step += s
p2 = round(c+np.sin(step)*2*s0)
p3 = round(c+np.cos(step)*2*s0)
pts = np.array(((c,c),
(p0,p1),
(p2,p3) ), dtype=int)
cv2.fillConvexPoly(arr, pts,
color=vhigh if i%2 else vlow,
lineType=cv2.LINE_AA if antiasing else 0)
arr[c,c]=0
return arr.astype(float)
|
def function[patSiemensStar, parameter[s0, n, vhigh, vlow, antiasing]]:
constant[make line pattern]
variable[arr] assign[=] call[name[np].full, parameter[tuple[[<ast.Name object at 0x7da20c991a20>, <ast.Name object at 0x7da20c992050>]], name[vlow]]]
variable[c] assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[name[s0] / constant[2.0]]]]]]
variable[s] assign[=] binary_operation[binary_operation[constant[2] * name[np].pi] / binary_operation[constant[2] * name[n]]]
variable[step] assign[=] constant[0]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[constant[2] * name[n]]]]] begin[:]
variable[p0] assign[=] call[name[round], parameter[binary_operation[name[c] + binary_operation[binary_operation[call[name[np].sin, parameter[name[step]]] * constant[2]] * name[s0]]]]]
variable[p1] assign[=] call[name[round], parameter[binary_operation[name[c] + binary_operation[binary_operation[call[name[np].cos, parameter[name[step]]] * constant[2]] * name[s0]]]]]
<ast.AugAssign object at 0x7da18f00d2a0>
variable[p2] assign[=] call[name[round], parameter[binary_operation[name[c] + binary_operation[binary_operation[call[name[np].sin, parameter[name[step]]] * constant[2]] * name[s0]]]]]
variable[p3] assign[=] call[name[round], parameter[binary_operation[name[c] + binary_operation[binary_operation[call[name[np].cos, parameter[name[step]]] * constant[2]] * name[s0]]]]]
variable[pts] assign[=] call[name[np].array, parameter[tuple[[<ast.Tuple object at 0x7da18f00fa90>, <ast.Tuple object at 0x7da18f00d1b0>, <ast.Tuple object at 0x7da18f00d1e0>]]]]
call[name[cv2].fillConvexPoly, parameter[name[arr], name[pts]]]
call[name[arr]][tuple[[<ast.Name object at 0x7da1b11176d0>, <ast.Name object at 0x7da1b1117790>]]] assign[=] constant[0]
return[call[name[arr].astype, parameter[name[float]]]]
|
keyword[def] identifier[patSiemensStar] ( identifier[s0] , identifier[n] = literal[int] , identifier[vhigh] = literal[int] , identifier[vlow] = literal[int] , identifier[antiasing] = keyword[False] ):
literal[string]
identifier[arr] = identifier[np] . identifier[full] (( identifier[s0] , identifier[s0] ), identifier[vlow] , identifier[dtype] = identifier[np] . identifier[uint8] )
identifier[c] = identifier[int] ( identifier[round] ( identifier[s0] / literal[int] ))
identifier[s] = literal[int] * identifier[np] . identifier[pi] /( literal[int] * identifier[n] )
identifier[step] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] * identifier[n] ):
identifier[p0] = identifier[round] ( identifier[c] + identifier[np] . identifier[sin] ( identifier[step] )* literal[int] * identifier[s0] )
identifier[p1] = identifier[round] ( identifier[c] + identifier[np] . identifier[cos] ( identifier[step] )* literal[int] * identifier[s0] )
identifier[step] += identifier[s]
identifier[p2] = identifier[round] ( identifier[c] + identifier[np] . identifier[sin] ( identifier[step] )* literal[int] * identifier[s0] )
identifier[p3] = identifier[round] ( identifier[c] + identifier[np] . identifier[cos] ( identifier[step] )* literal[int] * identifier[s0] )
identifier[pts] = identifier[np] . identifier[array] ((( identifier[c] , identifier[c] ),
( identifier[p0] , identifier[p1] ),
( identifier[p2] , identifier[p3] )), identifier[dtype] = identifier[int] )
identifier[cv2] . identifier[fillConvexPoly] ( identifier[arr] , identifier[pts] ,
identifier[color] = identifier[vhigh] keyword[if] identifier[i] % literal[int] keyword[else] identifier[vlow] ,
identifier[lineType] = identifier[cv2] . identifier[LINE_AA] keyword[if] identifier[antiasing] keyword[else] literal[int] )
identifier[arr] [ identifier[c] , identifier[c] ]= literal[int]
keyword[return] identifier[arr] . identifier[astype] ( identifier[float] )
|
def patSiemensStar(s0, n=72, vhigh=255, vlow=0, antiasing=False):
"""make line pattern"""
arr = np.full((s0, s0), vlow, dtype=np.uint8)
c = int(round(s0 / 2.0))
s = 2 * np.pi / (2 * n)
step = 0
for i in range(2 * n):
p0 = round(c + np.sin(step) * 2 * s0)
p1 = round(c + np.cos(step) * 2 * s0)
step += s
p2 = round(c + np.sin(step) * 2 * s0)
p3 = round(c + np.cos(step) * 2 * s0)
pts = np.array(((c, c), (p0, p1), (p2, p3)), dtype=int)
cv2.fillConvexPoly(arr, pts, color=vhigh if i % 2 else vlow, lineType=cv2.LINE_AA if antiasing else 0) # depends on [control=['for'], data=['i']]
arr[c, c] = 0
return arr.astype(float)
|
def GetVectorAsNumpy(numpy_type, buf, count, offset):
""" GetVecAsNumpy decodes values starting at buf[head] as
`numpy_type`, where `numpy_type` is a numpy dtype. """
if np is not None:
# TODO: could set .flags.writeable = False to make users jump through
# hoops before modifying...
return np.frombuffer(buf, dtype=numpy_type, count=count, offset=offset)
else:
raise NumpyRequiredForThisFeature('Numpy was not found.')
|
def function[GetVectorAsNumpy, parameter[numpy_type, buf, count, offset]]:
constant[ GetVecAsNumpy decodes values starting at buf[head] as
`numpy_type`, where `numpy_type` is a numpy dtype. ]
if compare[name[np] is_not constant[None]] begin[:]
return[call[name[np].frombuffer, parameter[name[buf]]]]
|
keyword[def] identifier[GetVectorAsNumpy] ( identifier[numpy_type] , identifier[buf] , identifier[count] , identifier[offset] ):
literal[string]
keyword[if] identifier[np] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[np] . identifier[frombuffer] ( identifier[buf] , identifier[dtype] = identifier[numpy_type] , identifier[count] = identifier[count] , identifier[offset] = identifier[offset] )
keyword[else] :
keyword[raise] identifier[NumpyRequiredForThisFeature] ( literal[string] )
|
def GetVectorAsNumpy(numpy_type, buf, count, offset):
""" GetVecAsNumpy decodes values starting at buf[head] as
`numpy_type`, where `numpy_type` is a numpy dtype. """
if np is not None:
# TODO: could set .flags.writeable = False to make users jump through
# hoops before modifying...
return np.frombuffer(buf, dtype=numpy_type, count=count, offset=offset) # depends on [control=['if'], data=['np']]
else:
raise NumpyRequiredForThisFeature('Numpy was not found.')
|
def extract(self, file_name: str, sheet_name: str, region: List, variables: Dict) -> List[Extraction]:
"""
Args:
file_name (str): file name
sheet_name (str): sheet name
region (List[]): from upper left cell to bottom right cell, e.g., ['A,1', 'Z,10']
variables (Dict): key is variable name, value can be:
1. a single expression 2. comma separated expression, will be treated as location
$row, $col are built-in variables can be used in expression
constant row and column value can be noted as $NAME (e.g., $1, $10, $A, $GG)
Returns:
List[Extraction] : A list of extracted variables dictionary
"""
extractions = []
book = pyexcel.get_book(file_name=file_name)
sheet = book[sheet_name]
region = [ExcelExtractor._excel_coord_to_location(coord) for coord in region]
r = region[0][0]
# per row
for row in sheet.region(region[0], region[1]):
c = region[0][1]
# per col
for col in row:
var = copy.deepcopy(variables)
# per variable
for k, v in var.items():
parsed_v = ExcelExtractor._parse_variable(v, r, c)
if len(parsed_v) == 1: # normal variable
var[k] = parsed_v[0]
else: # location
rr, cc = parsed_v
var[k] = sheet[rr, cc]
extractions.append(var)
c += 1
r += 1
return extractions
|
def function[extract, parameter[self, file_name, sheet_name, region, variables]]:
constant[
Args:
file_name (str): file name
sheet_name (str): sheet name
region (List[]): from upper left cell to bottom right cell, e.g., ['A,1', 'Z,10']
variables (Dict): key is variable name, value can be:
1. a single expression 2. comma separated expression, will be treated as location
$row, $col are built-in variables can be used in expression
constant row and column value can be noted as $NAME (e.g., $1, $10, $A, $GG)
Returns:
List[Extraction] : A list of extracted variables dictionary
]
variable[extractions] assign[=] list[[]]
variable[book] assign[=] call[name[pyexcel].get_book, parameter[]]
variable[sheet] assign[=] call[name[book]][name[sheet_name]]
variable[region] assign[=] <ast.ListComp object at 0x7da1b0bc8fd0>
variable[r] assign[=] call[call[name[region]][constant[0]]][constant[0]]
for taget[name[row]] in starred[call[name[sheet].region, parameter[call[name[region]][constant[0]], call[name[region]][constant[1]]]]] begin[:]
variable[c] assign[=] call[call[name[region]][constant[0]]][constant[1]]
for taget[name[col]] in starred[name[row]] begin[:]
variable[var] assign[=] call[name[copy].deepcopy, parameter[name[variables]]]
for taget[tuple[[<ast.Name object at 0x7da1b0bc9540>, <ast.Name object at 0x7da1b0bcb490>]]] in starred[call[name[var].items, parameter[]]] begin[:]
variable[parsed_v] assign[=] call[name[ExcelExtractor]._parse_variable, parameter[name[v], name[r], name[c]]]
if compare[call[name[len], parameter[name[parsed_v]]] equal[==] constant[1]] begin[:]
call[name[var]][name[k]] assign[=] call[name[parsed_v]][constant[0]]
call[name[extractions].append, parameter[name[var]]]
<ast.AugAssign object at 0x7da1b0bcac20>
<ast.AugAssign object at 0x7da1b0bc9cf0>
return[name[extractions]]
|
keyword[def] identifier[extract] ( identifier[self] , identifier[file_name] : identifier[str] , identifier[sheet_name] : identifier[str] , identifier[region] : identifier[List] , identifier[variables] : identifier[Dict] )-> identifier[List] [ identifier[Extraction] ]:
literal[string]
identifier[extractions] =[]
identifier[book] = identifier[pyexcel] . identifier[get_book] ( identifier[file_name] = identifier[file_name] )
identifier[sheet] = identifier[book] [ identifier[sheet_name] ]
identifier[region] =[ identifier[ExcelExtractor] . identifier[_excel_coord_to_location] ( identifier[coord] ) keyword[for] identifier[coord] keyword[in] identifier[region] ]
identifier[r] = identifier[region] [ literal[int] ][ literal[int] ]
keyword[for] identifier[row] keyword[in] identifier[sheet] . identifier[region] ( identifier[region] [ literal[int] ], identifier[region] [ literal[int] ]):
identifier[c] = identifier[region] [ literal[int] ][ literal[int] ]
keyword[for] identifier[col] keyword[in] identifier[row] :
identifier[var] = identifier[copy] . identifier[deepcopy] ( identifier[variables] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[var] . identifier[items] ():
identifier[parsed_v] = identifier[ExcelExtractor] . identifier[_parse_variable] ( identifier[v] , identifier[r] , identifier[c] )
keyword[if] identifier[len] ( identifier[parsed_v] )== literal[int] :
identifier[var] [ identifier[k] ]= identifier[parsed_v] [ literal[int] ]
keyword[else] :
identifier[rr] , identifier[cc] = identifier[parsed_v]
identifier[var] [ identifier[k] ]= identifier[sheet] [ identifier[rr] , identifier[cc] ]
identifier[extractions] . identifier[append] ( identifier[var] )
identifier[c] += literal[int]
identifier[r] += literal[int]
keyword[return] identifier[extractions]
|
def extract(self, file_name: str, sheet_name: str, region: List, variables: Dict) -> List[Extraction]:
"""
Args:
file_name (str): file name
sheet_name (str): sheet name
region (List[]): from upper left cell to bottom right cell, e.g., ['A,1', 'Z,10']
variables (Dict): key is variable name, value can be:
1. a single expression 2. comma separated expression, will be treated as location
$row, $col are built-in variables can be used in expression
constant row and column value can be noted as $NAME (e.g., $1, $10, $A, $GG)
Returns:
List[Extraction] : A list of extracted variables dictionary
"""
extractions = []
book = pyexcel.get_book(file_name=file_name)
sheet = book[sheet_name]
region = [ExcelExtractor._excel_coord_to_location(coord) for coord in region]
r = region[0][0]
# per row
for row in sheet.region(region[0], region[1]):
c = region[0][1]
# per col
for col in row:
var = copy.deepcopy(variables)
# per variable
for (k, v) in var.items():
parsed_v = ExcelExtractor._parse_variable(v, r, c)
if len(parsed_v) == 1: # normal variable
var[k] = parsed_v[0] # depends on [control=['if'], data=[]]
else: # location
(rr, cc) = parsed_v
var[k] = sheet[rr, cc]
extractions.append(var) # depends on [control=['for'], data=[]]
c += 1 # depends on [control=['for'], data=[]]
r += 1 # depends on [control=['for'], data=['row']]
return extractions
|
def parse_options():
"""
Parses command-line options:
"""
try:
opts, args = getopt.getopt(sys.argv[1:],
'k:n:ht:v',
['kval=',
'size=',
'help',
'type=',
'verb'])
except getopt.GetoptError as err:
sys.stderr.write(str(err).capitalize())
usage()
sys.exit(1)
kval = 1
size = 8
ftype = 'php'
verb = False
for opt, arg in opts:
if opt in ('-h', '--help'):
usage()
sys.exit(0)
elif opt in ('-k', '--kval'):
kval = int(arg)
elif opt in ('-n', '--size'):
size = int(arg)
elif opt in ('-t', '--type'):
ftype = str(arg)
elif opt in ('-v', '--verb'):
verb = True
else:
assert False, 'Unhandled option: {0} {1}'.format(opt, arg)
return ftype, kval, size, verb
|
def function[parse_options, parameter[]]:
constant[
Parses command-line options:
]
<ast.Try object at 0x7da1b112acb0>
variable[kval] assign[=] constant[1]
variable[size] assign[=] constant[8]
variable[ftype] assign[=] constant[php]
variable[verb] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da1b112b160>, <ast.Name object at 0x7da1b112b3a0>]]] in starred[name[opts]] begin[:]
if compare[name[opt] in tuple[[<ast.Constant object at 0x7da1b112bd30>, <ast.Constant object at 0x7da1b112b940>]]] begin[:]
call[name[usage], parameter[]]
call[name[sys].exit, parameter[constant[0]]]
return[tuple[[<ast.Name object at 0x7da1b124ca00>, <ast.Name object at 0x7da1b124d750>, <ast.Name object at 0x7da1b1178970>, <ast.Name object at 0x7da1b11797e0>]]]
|
keyword[def] identifier[parse_options] ():
literal[string]
keyword[try] :
identifier[opts] , identifier[args] = identifier[getopt] . identifier[getopt] ( identifier[sys] . identifier[argv] [ literal[int] :],
literal[string] ,
[ literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ])
keyword[except] identifier[getopt] . identifier[GetoptError] keyword[as] identifier[err] :
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[str] ( identifier[err] ). identifier[capitalize] ())
identifier[usage] ()
identifier[sys] . identifier[exit] ( literal[int] )
identifier[kval] = literal[int]
identifier[size] = literal[int]
identifier[ftype] = literal[string]
identifier[verb] = keyword[False]
keyword[for] identifier[opt] , identifier[arg] keyword[in] identifier[opts] :
keyword[if] identifier[opt] keyword[in] ( literal[string] , literal[string] ):
identifier[usage] ()
identifier[sys] . identifier[exit] ( literal[int] )
keyword[elif] identifier[opt] keyword[in] ( literal[string] , literal[string] ):
identifier[kval] = identifier[int] ( identifier[arg] )
keyword[elif] identifier[opt] keyword[in] ( literal[string] , literal[string] ):
identifier[size] = identifier[int] ( identifier[arg] )
keyword[elif] identifier[opt] keyword[in] ( literal[string] , literal[string] ):
identifier[ftype] = identifier[str] ( identifier[arg] )
keyword[elif] identifier[opt] keyword[in] ( literal[string] , literal[string] ):
identifier[verb] = keyword[True]
keyword[else] :
keyword[assert] keyword[False] , literal[string] . identifier[format] ( identifier[opt] , identifier[arg] )
keyword[return] identifier[ftype] , identifier[kval] , identifier[size] , identifier[verb]
|
def parse_options():
"""
Parses command-line options:
"""
try:
(opts, args) = getopt.getopt(sys.argv[1:], 'k:n:ht:v', ['kval=', 'size=', 'help', 'type=', 'verb']) # depends on [control=['try'], data=[]]
except getopt.GetoptError as err:
sys.stderr.write(str(err).capitalize())
usage()
sys.exit(1) # depends on [control=['except'], data=['err']]
kval = 1
size = 8
ftype = 'php'
verb = False
for (opt, arg) in opts:
if opt in ('-h', '--help'):
usage()
sys.exit(0) # depends on [control=['if'], data=[]]
elif opt in ('-k', '--kval'):
kval = int(arg) # depends on [control=['if'], data=[]]
elif opt in ('-n', '--size'):
size = int(arg) # depends on [control=['if'], data=[]]
elif opt in ('-t', '--type'):
ftype = str(arg) # depends on [control=['if'], data=[]]
elif opt in ('-v', '--verb'):
verb = True # depends on [control=['if'], data=[]]
else:
assert False, 'Unhandled option: {0} {1}'.format(opt, arg) # depends on [control=['for'], data=[]]
return (ftype, kval, size, verb)
|
def fromPy(cls, val, typeObj, vldMask=None):
"""
:param val: value of python type int or None
:param typeObj: instance of Integer
:param vldMask: None vldMask is resolved from val,
if is 0 value is invalidated
if is 1 value has to be valid
"""
assert isinstance(typeObj, Integer)
vld = int(val is not None)
if not vld:
assert vldMask is None or vldMask == 0
val = 0
else:
if vldMask == 0:
val = False
vld = 0
else:
val = int(val)
return cls(val, typeObj, vld)
|
def function[fromPy, parameter[cls, val, typeObj, vldMask]]:
constant[
:param val: value of python type int or None
:param typeObj: instance of Integer
:param vldMask: None vldMask is resolved from val,
if is 0 value is invalidated
if is 1 value has to be valid
]
assert[call[name[isinstance], parameter[name[typeObj], name[Integer]]]]
variable[vld] assign[=] call[name[int], parameter[compare[name[val] is_not constant[None]]]]
if <ast.UnaryOp object at 0x7da18c4ce2c0> begin[:]
assert[<ast.BoolOp object at 0x7da18c4cc0d0>]
variable[val] assign[=] constant[0]
return[call[name[cls], parameter[name[val], name[typeObj], name[vld]]]]
|
keyword[def] identifier[fromPy] ( identifier[cls] , identifier[val] , identifier[typeObj] , identifier[vldMask] = keyword[None] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[typeObj] , identifier[Integer] )
identifier[vld] = identifier[int] ( identifier[val] keyword[is] keyword[not] keyword[None] )
keyword[if] keyword[not] identifier[vld] :
keyword[assert] identifier[vldMask] keyword[is] keyword[None] keyword[or] identifier[vldMask] == literal[int]
identifier[val] = literal[int]
keyword[else] :
keyword[if] identifier[vldMask] == literal[int] :
identifier[val] = keyword[False]
identifier[vld] = literal[int]
keyword[else] :
identifier[val] = identifier[int] ( identifier[val] )
keyword[return] identifier[cls] ( identifier[val] , identifier[typeObj] , identifier[vld] )
|
def fromPy(cls, val, typeObj, vldMask=None):
"""
:param val: value of python type int or None
:param typeObj: instance of Integer
:param vldMask: None vldMask is resolved from val,
if is 0 value is invalidated
if is 1 value has to be valid
"""
assert isinstance(typeObj, Integer)
vld = int(val is not None)
if not vld:
assert vldMask is None or vldMask == 0
val = 0 # depends on [control=['if'], data=[]]
elif vldMask == 0:
val = False
vld = 0 # depends on [control=['if'], data=[]]
else:
val = int(val)
return cls(val, typeObj, vld)
|
def clear_published(self):
"""Removes the published status.
:raise: ``NoAccess`` -- ``Metadata.isRequired()`` is ``true`` or ``Metadata.isReadOnly()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
metadata = Metadata(**settings.METADATA['published'])
if metadata.is_read_only() or metadata.is_required():
raise NoAccess()
self._my_map['published'] = False
|
def function[clear_published, parameter[self]]:
constant[Removes the published status.
:raise: ``NoAccess`` -- ``Metadata.isRequired()`` is ``true`` or ``Metadata.isReadOnly()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
]
variable[metadata] assign[=] call[name[Metadata], parameter[]]
if <ast.BoolOp object at 0x7da207f01510> begin[:]
<ast.Raise object at 0x7da207f02200>
call[name[self]._my_map][constant[published]] assign[=] constant[False]
|
keyword[def] identifier[clear_published] ( identifier[self] ):
literal[string]
identifier[metadata] = identifier[Metadata] (** identifier[settings] . identifier[METADATA] [ literal[string] ])
keyword[if] identifier[metadata] . identifier[is_read_only] () keyword[or] identifier[metadata] . identifier[is_required] ():
keyword[raise] identifier[NoAccess] ()
identifier[self] . identifier[_my_map] [ literal[string] ]= keyword[False]
|
def clear_published(self):
"""Removes the published status.
:raise: ``NoAccess`` -- ``Metadata.isRequired()`` is ``true`` or ``Metadata.isReadOnly()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
metadata = Metadata(**settings.METADATA['published'])
if metadata.is_read_only() or metadata.is_required():
raise NoAccess() # depends on [control=['if'], data=[]]
self._my_map['published'] = False
|
def _read_body_by_chunk(self, response, file, raw=False):
'''Read the connection using chunked transfer encoding.
Coroutine.
'''
reader = ChunkedTransferReader(self._connection)
file_is_async = hasattr(file, 'drain')
while True:
chunk_size, data = yield from reader.read_chunk_header()
self._data_event_dispatcher.notify_read(data)
if raw:
file.write(data)
if not chunk_size:
break
while True:
content, data = yield from reader.read_chunk_body()
self._data_event_dispatcher.notify_read(data)
if not content:
if raw:
file.write(data)
break
content = self._decompress_data(content)
if file:
file.write(content)
if file_is_async:
yield from file.drain()
content = self._flush_decompressor()
if file:
file.write(content)
if file_is_async:
yield from file.drain()
trailer_data = yield from reader.read_trailer()
self._data_event_dispatcher.notify_read(trailer_data)
if file and raw:
file.write(trailer_data)
if file_is_async:
yield from file.drain()
response.fields.parse(trailer_data)
|
def function[_read_body_by_chunk, parameter[self, response, file, raw]]:
constant[Read the connection using chunked transfer encoding.
Coroutine.
]
variable[reader] assign[=] call[name[ChunkedTransferReader], parameter[name[self]._connection]]
variable[file_is_async] assign[=] call[name[hasattr], parameter[name[file], constant[drain]]]
while constant[True] begin[:]
<ast.Tuple object at 0x7da2054a5cf0> assign[=] <ast.YieldFrom object at 0x7da2054a40a0>
call[name[self]._data_event_dispatcher.notify_read, parameter[name[data]]]
if name[raw] begin[:]
call[name[file].write, parameter[name[data]]]
if <ast.UnaryOp object at 0x7da1b23464a0> begin[:]
break
while constant[True] begin[:]
<ast.Tuple object at 0x7da1b2345150> assign[=] <ast.YieldFrom object at 0x7da1b2346050>
call[name[self]._data_event_dispatcher.notify_read, parameter[name[data]]]
if <ast.UnaryOp object at 0x7da1b2344cd0> begin[:]
if name[raw] begin[:]
call[name[file].write, parameter[name[data]]]
break
variable[content] assign[=] call[name[self]._decompress_data, parameter[name[content]]]
if name[file] begin[:]
call[name[file].write, parameter[name[content]]]
if name[file_is_async] begin[:]
<ast.YieldFrom object at 0x7da1b2344400>
variable[content] assign[=] call[name[self]._flush_decompressor, parameter[]]
if name[file] begin[:]
call[name[file].write, parameter[name[content]]]
if name[file_is_async] begin[:]
<ast.YieldFrom object at 0x7da1b2345270>
variable[trailer_data] assign[=] <ast.YieldFrom object at 0x7da1b2346b90>
call[name[self]._data_event_dispatcher.notify_read, parameter[name[trailer_data]]]
if <ast.BoolOp object at 0x7da1b2345600> begin[:]
call[name[file].write, parameter[name[trailer_data]]]
if name[file_is_async] begin[:]
<ast.YieldFrom object at 0x7da1b2346a40>
call[name[response].fields.parse, parameter[name[trailer_data]]]
|
keyword[def] identifier[_read_body_by_chunk] ( identifier[self] , identifier[response] , identifier[file] , identifier[raw] = keyword[False] ):
literal[string]
identifier[reader] = identifier[ChunkedTransferReader] ( identifier[self] . identifier[_connection] )
identifier[file_is_async] = identifier[hasattr] ( identifier[file] , literal[string] )
keyword[while] keyword[True] :
identifier[chunk_size] , identifier[data] = keyword[yield] keyword[from] identifier[reader] . identifier[read_chunk_header] ()
identifier[self] . identifier[_data_event_dispatcher] . identifier[notify_read] ( identifier[data] )
keyword[if] identifier[raw] :
identifier[file] . identifier[write] ( identifier[data] )
keyword[if] keyword[not] identifier[chunk_size] :
keyword[break]
keyword[while] keyword[True] :
identifier[content] , identifier[data] = keyword[yield] keyword[from] identifier[reader] . identifier[read_chunk_body] ()
identifier[self] . identifier[_data_event_dispatcher] . identifier[notify_read] ( identifier[data] )
keyword[if] keyword[not] identifier[content] :
keyword[if] identifier[raw] :
identifier[file] . identifier[write] ( identifier[data] )
keyword[break]
identifier[content] = identifier[self] . identifier[_decompress_data] ( identifier[content] )
keyword[if] identifier[file] :
identifier[file] . identifier[write] ( identifier[content] )
keyword[if] identifier[file_is_async] :
keyword[yield] keyword[from] identifier[file] . identifier[drain] ()
identifier[content] = identifier[self] . identifier[_flush_decompressor] ()
keyword[if] identifier[file] :
identifier[file] . identifier[write] ( identifier[content] )
keyword[if] identifier[file_is_async] :
keyword[yield] keyword[from] identifier[file] . identifier[drain] ()
identifier[trailer_data] = keyword[yield] keyword[from] identifier[reader] . identifier[read_trailer] ()
identifier[self] . identifier[_data_event_dispatcher] . identifier[notify_read] ( identifier[trailer_data] )
keyword[if] identifier[file] keyword[and] identifier[raw] :
identifier[file] . identifier[write] ( identifier[trailer_data] )
keyword[if] identifier[file_is_async] :
keyword[yield] keyword[from] identifier[file] . identifier[drain] ()
identifier[response] . identifier[fields] . identifier[parse] ( identifier[trailer_data] )
|
def _read_body_by_chunk(self, response, file, raw=False):
"""Read the connection using chunked transfer encoding.
Coroutine.
"""
reader = ChunkedTransferReader(self._connection)
file_is_async = hasattr(file, 'drain')
while True:
(chunk_size, data) = (yield from reader.read_chunk_header())
self._data_event_dispatcher.notify_read(data)
if raw:
file.write(data) # depends on [control=['if'], data=[]]
if not chunk_size:
break # depends on [control=['if'], data=[]]
while True:
(content, data) = (yield from reader.read_chunk_body())
self._data_event_dispatcher.notify_read(data)
if not content:
if raw:
file.write(data) # depends on [control=['if'], data=[]]
break # depends on [control=['if'], data=[]]
content = self._decompress_data(content)
if file:
file.write(content)
if file_is_async:
yield from file.drain() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['while'], data=[]]
content = self._flush_decompressor()
if file:
file.write(content)
if file_is_async:
yield from file.drain() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
trailer_data = (yield from reader.read_trailer())
self._data_event_dispatcher.notify_read(trailer_data)
if file and raw:
file.write(trailer_data)
if file_is_async:
yield from file.drain() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
response.fields.parse(trailer_data)
|
def cross_validate(self, ax):
'''
Performs the cross-validation step.
'''
# The CDPP to beat
cdpp_opt = self.get_cdpp_arr()
# Loop over all chunks
for b, brkpt in enumerate(self.breakpoints):
log.info("Cross-validating chunk %d/%d..." %
(b + 1, len(self.breakpoints)))
# Mask for current chunk
m = self.get_masked_chunk(b)
# Mask transits and outliers
time = self.time[m]
flux = self.fraw[m]
ferr = self.fraw_err[m]
med = np.nanmedian(self.fraw)
# Setup the GP
gp = GP(self.kernel, self.kernel_params, white=False)
gp.compute(time, ferr)
# The masks
masks = list(Chunks(np.arange(0, len(time)),
len(time) // self.cdivs))
# The pre-computed matrices
pre_v = [self.cv_precompute(mask, b) for mask in masks]
# Initialize with the nPLD solution
log_lam_opt = np.log10(self.lam[b])
scatter_opt = self.validation_scatter(
log_lam_opt, b, masks, pre_v, gp, flux, time, med)
log.info("Iter 0/%d: " % (self.piter) +
"logL = (%s), s = %.3f" %
(", ".join(["%.3f" % l for l in log_lam_opt]),
scatter_opt))
# Do `piter` iterations
for p in range(self.piter):
# Perturb the initial condition a bit
log_lam = np.array(
np.log10(self.lam[b])) * \
(1 + self.ppert * np.random.randn(len(self.lam[b])))
scatter = self.validation_scatter(
log_lam, b, masks, pre_v, gp, flux, time, med)
log.info("Initializing at: " +
"logL = (%s), s = %.3f" %
(", ".join(["%.3f" % l for l in log_lam]), scatter))
# Call the minimizer
log_lam, scatter, _, _, _, _ = \
fmin_powell(self.validation_scatter, log_lam,
args=(b, masks, pre_v, gp, flux, time, med),
maxfun=self.pmaxf, disp=False,
full_output=True)
# Did it improve the CDPP?
tmp = np.array(self.lam[b])
self.lam[b] = 10 ** log_lam
self.compute()
cdpp = self.get_cdpp_arr()[b]
self.lam[b] = tmp
if cdpp < cdpp_opt[b]:
cdpp_opt[b] = cdpp
log_lam_opt = log_lam
# Log it
log.info("Iter %d/%d: " % (p + 1, self.piter) +
"logL = (%s), s = %.3f" %
(", ".join(["%.3f" % l for l in log_lam]), scatter))
# The best solution
log.info("Found minimum: logL = (%s), s = %.3f" %
(", ".join(["%.3f" % l for l in log_lam_opt]),
scatter_opt))
self.lam[b] = 10 ** log_lam_opt
# We're just going to plot lambda as a function of chunk number
bs = np.arange(len(self.breakpoints))
color = ['k', 'b', 'r', 'g', 'y']
for n in range(self.pld_order):
ax[0].plot(bs + 1, [np.log10(self.lam[b][n])
for b in bs], '.', color=color[n])
ax[0].plot(bs + 1, [np.log10(self.lam[b][n])
for b in bs], '-', color=color[n], alpha=0.25)
ax[0].set_ylabel(r'$\log\Lambda$', fontsize=5)
ax[0].margins(0.1, 0.1)
ax[0].set_xticks(np.arange(1, len(self.breakpoints) + 1))
ax[0].set_xticklabels([])
# Now plot the CDPP
cdpp_arr = self.get_cdpp_arr()
ax[1].plot(bs + 1, cdpp_arr, 'b.')
ax[1].plot(bs + 1, cdpp_arr, 'b-', alpha=0.25)
ax[1].margins(0.1, 0.1)
ax[1].set_ylabel(r'Scatter (ppm)', fontsize=5)
ax[1].set_xlabel(r'Chunk', fontsize=5)
ax[1].set_xticks(np.arange(1, len(self.breakpoints) + 1))
|
def function[cross_validate, parameter[self, ax]]:
constant[
Performs the cross-validation step.
]
variable[cdpp_opt] assign[=] call[name[self].get_cdpp_arr, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0e553f0>, <ast.Name object at 0x7da1b0e55420>]]] in starred[call[name[enumerate], parameter[name[self].breakpoints]]] begin[:]
call[name[log].info, parameter[binary_operation[constant[Cross-validating chunk %d/%d...] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b0e55660>, <ast.Call object at 0x7da1b0e556f0>]]]]]
variable[m] assign[=] call[name[self].get_masked_chunk, parameter[name[b]]]
variable[time] assign[=] call[name[self].time][name[m]]
variable[flux] assign[=] call[name[self].fraw][name[m]]
variable[ferr] assign[=] call[name[self].fraw_err][name[m]]
variable[med] assign[=] call[name[np].nanmedian, parameter[name[self].fraw]]
variable[gp] assign[=] call[name[GP], parameter[name[self].kernel, name[self].kernel_params]]
call[name[gp].compute, parameter[name[time], name[ferr]]]
variable[masks] assign[=] call[name[list], parameter[call[name[Chunks], parameter[call[name[np].arange, parameter[constant[0], call[name[len], parameter[name[time]]]]], binary_operation[call[name[len], parameter[name[time]]] <ast.FloorDiv object at 0x7da2590d6bc0> name[self].cdivs]]]]]
variable[pre_v] assign[=] <ast.ListComp object at 0x7da1b0e54040>
variable[log_lam_opt] assign[=] call[name[np].log10, parameter[call[name[self].lam][name[b]]]]
variable[scatter_opt] assign[=] call[name[self].validation_scatter, parameter[name[log_lam_opt], name[b], name[masks], name[pre_v], name[gp], name[flux], name[time], name[med]]]
call[name[log].info, parameter[binary_operation[binary_operation[constant[Iter 0/%d: ] <ast.Mod object at 0x7da2590d6920> name[self].piter] + binary_operation[constant[logL = (%s), s = %.3f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b0e56500>, <ast.Name object at 0x7da1b0e55cc0>]]]]]]
for taget[name[p]] in starred[call[name[range], parameter[name[self].piter]]] begin[:]
variable[log_lam] assign[=] binary_operation[call[name[np].array, parameter[call[name[np].log10, parameter[call[name[self].lam][name[b]]]]]] * binary_operation[constant[1] + binary_operation[name[self].ppert * call[name[np].random.randn, parameter[call[name[len], parameter[call[name[self].lam][name[b]]]]]]]]]
variable[scatter] assign[=] call[name[self].validation_scatter, parameter[name[log_lam], name[b], name[masks], name[pre_v], name[gp], name[flux], name[time], name[med]]]
call[name[log].info, parameter[binary_operation[constant[Initializing at: ] + binary_operation[constant[logL = (%s), s = %.3f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b0e57520>, <ast.Name object at 0x7da1b0e57730>]]]]]]
<ast.Tuple object at 0x7da1b0e576a0> assign[=] call[name[fmin_powell], parameter[name[self].validation_scatter, name[log_lam]]]
variable[tmp] assign[=] call[name[np].array, parameter[call[name[self].lam][name[b]]]]
call[name[self].lam][name[b]] assign[=] binary_operation[constant[10] ** name[log_lam]]
call[name[self].compute, parameter[]]
variable[cdpp] assign[=] call[call[name[self].get_cdpp_arr, parameter[]]][name[b]]
call[name[self].lam][name[b]] assign[=] name[tmp]
if compare[name[cdpp] less[<] call[name[cdpp_opt]][name[b]]] begin[:]
call[name[cdpp_opt]][name[b]] assign[=] name[cdpp]
variable[log_lam_opt] assign[=] name[log_lam]
call[name[log].info, parameter[binary_operation[binary_operation[constant[Iter %d/%d: ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da1b0e5a7d0>, <ast.Attribute object at 0x7da1b0e5a7a0>]]] + binary_operation[constant[logL = (%s), s = %.3f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b0e5bcd0>, <ast.Name object at 0x7da1b0e59150>]]]]]]
call[name[log].info, parameter[binary_operation[constant[Found minimum: logL = (%s), s = %.3f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b0e5a470>, <ast.Name object at 0x7da1b0e5be50>]]]]]
call[name[self].lam][name[b]] assign[=] binary_operation[constant[10] ** name[log_lam_opt]]
variable[bs] assign[=] call[name[np].arange, parameter[call[name[len], parameter[name[self].breakpoints]]]]
variable[color] assign[=] list[[<ast.Constant object at 0x7da1b0e5b7c0>, <ast.Constant object at 0x7da1b0e5b850>, <ast.Constant object at 0x7da1b0e5b790>, <ast.Constant object at 0x7da1b0e5b880>, <ast.Constant object at 0x7da1b0e5bdc0>]]
for taget[name[n]] in starred[call[name[range], parameter[name[self].pld_order]]] begin[:]
call[call[name[ax]][constant[0]].plot, parameter[binary_operation[name[bs] + constant[1]], <ast.ListComp object at 0x7da1b0e5ba00>, constant[.]]]
call[call[name[ax]][constant[0]].plot, parameter[binary_operation[name[bs] + constant[1]], <ast.ListComp object at 0x7da1b0e5b3d0>, constant[-]]]
call[call[name[ax]][constant[0]].set_ylabel, parameter[constant[$\log\Lambda$]]]
call[call[name[ax]][constant[0]].margins, parameter[constant[0.1], constant[0.1]]]
call[call[name[ax]][constant[0]].set_xticks, parameter[call[name[np].arange, parameter[constant[1], binary_operation[call[name[len], parameter[name[self].breakpoints]] + constant[1]]]]]]
call[call[name[ax]][constant[0]].set_xticklabels, parameter[list[[]]]]
variable[cdpp_arr] assign[=] call[name[self].get_cdpp_arr, parameter[]]
call[call[name[ax]][constant[1]].plot, parameter[binary_operation[name[bs] + constant[1]], name[cdpp_arr], constant[b.]]]
call[call[name[ax]][constant[1]].plot, parameter[binary_operation[name[bs] + constant[1]], name[cdpp_arr], constant[b-]]]
call[call[name[ax]][constant[1]].margins, parameter[constant[0.1], constant[0.1]]]
call[call[name[ax]][constant[1]].set_ylabel, parameter[constant[Scatter (ppm)]]]
call[call[name[ax]][constant[1]].set_xlabel, parameter[constant[Chunk]]]
call[call[name[ax]][constant[1]].set_xticks, parameter[call[name[np].arange, parameter[constant[1], binary_operation[call[name[len], parameter[name[self].breakpoints]] + constant[1]]]]]]
|
keyword[def] identifier[cross_validate] ( identifier[self] , identifier[ax] ):
literal[string]
identifier[cdpp_opt] = identifier[self] . identifier[get_cdpp_arr] ()
keyword[for] identifier[b] , identifier[brkpt] keyword[in] identifier[enumerate] ( identifier[self] . identifier[breakpoints] ):
identifier[log] . identifier[info] ( literal[string] %
( identifier[b] + literal[int] , identifier[len] ( identifier[self] . identifier[breakpoints] )))
identifier[m] = identifier[self] . identifier[get_masked_chunk] ( identifier[b] )
identifier[time] = identifier[self] . identifier[time] [ identifier[m] ]
identifier[flux] = identifier[self] . identifier[fraw] [ identifier[m] ]
identifier[ferr] = identifier[self] . identifier[fraw_err] [ identifier[m] ]
identifier[med] = identifier[np] . identifier[nanmedian] ( identifier[self] . identifier[fraw] )
identifier[gp] = identifier[GP] ( identifier[self] . identifier[kernel] , identifier[self] . identifier[kernel_params] , identifier[white] = keyword[False] )
identifier[gp] . identifier[compute] ( identifier[time] , identifier[ferr] )
identifier[masks] = identifier[list] ( identifier[Chunks] ( identifier[np] . identifier[arange] ( literal[int] , identifier[len] ( identifier[time] )),
identifier[len] ( identifier[time] )// identifier[self] . identifier[cdivs] ))
identifier[pre_v] =[ identifier[self] . identifier[cv_precompute] ( identifier[mask] , identifier[b] ) keyword[for] identifier[mask] keyword[in] identifier[masks] ]
identifier[log_lam_opt] = identifier[np] . identifier[log10] ( identifier[self] . identifier[lam] [ identifier[b] ])
identifier[scatter_opt] = identifier[self] . identifier[validation_scatter] (
identifier[log_lam_opt] , identifier[b] , identifier[masks] , identifier[pre_v] , identifier[gp] , identifier[flux] , identifier[time] , identifier[med] )
identifier[log] . identifier[info] ( literal[string] %( identifier[self] . identifier[piter] )+
literal[string] %
( literal[string] . identifier[join] ([ literal[string] % identifier[l] keyword[for] identifier[l] keyword[in] identifier[log_lam_opt] ]),
identifier[scatter_opt] ))
keyword[for] identifier[p] keyword[in] identifier[range] ( identifier[self] . identifier[piter] ):
identifier[log_lam] = identifier[np] . identifier[array] (
identifier[np] . identifier[log10] ( identifier[self] . identifier[lam] [ identifier[b] ]))*( literal[int] + identifier[self] . identifier[ppert] * identifier[np] . identifier[random] . identifier[randn] ( identifier[len] ( identifier[self] . identifier[lam] [ identifier[b] ])))
identifier[scatter] = identifier[self] . identifier[validation_scatter] (
identifier[log_lam] , identifier[b] , identifier[masks] , identifier[pre_v] , identifier[gp] , identifier[flux] , identifier[time] , identifier[med] )
identifier[log] . identifier[info] ( literal[string] +
literal[string] %
( literal[string] . identifier[join] ([ literal[string] % identifier[l] keyword[for] identifier[l] keyword[in] identifier[log_lam] ]), identifier[scatter] ))
identifier[log_lam] , identifier[scatter] , identifier[_] , identifier[_] , identifier[_] , identifier[_] = identifier[fmin_powell] ( identifier[self] . identifier[validation_scatter] , identifier[log_lam] ,
identifier[args] =( identifier[b] , identifier[masks] , identifier[pre_v] , identifier[gp] , identifier[flux] , identifier[time] , identifier[med] ),
identifier[maxfun] = identifier[self] . identifier[pmaxf] , identifier[disp] = keyword[False] ,
identifier[full_output] = keyword[True] )
identifier[tmp] = identifier[np] . identifier[array] ( identifier[self] . identifier[lam] [ identifier[b] ])
identifier[self] . identifier[lam] [ identifier[b] ]= literal[int] ** identifier[log_lam]
identifier[self] . identifier[compute] ()
identifier[cdpp] = identifier[self] . identifier[get_cdpp_arr] ()[ identifier[b] ]
identifier[self] . identifier[lam] [ identifier[b] ]= identifier[tmp]
keyword[if] identifier[cdpp] < identifier[cdpp_opt] [ identifier[b] ]:
identifier[cdpp_opt] [ identifier[b] ]= identifier[cdpp]
identifier[log_lam_opt] = identifier[log_lam]
identifier[log] . identifier[info] ( literal[string] %( identifier[p] + literal[int] , identifier[self] . identifier[piter] )+
literal[string] %
( literal[string] . identifier[join] ([ literal[string] % identifier[l] keyword[for] identifier[l] keyword[in] identifier[log_lam] ]), identifier[scatter] ))
identifier[log] . identifier[info] ( literal[string] %
( literal[string] . identifier[join] ([ literal[string] % identifier[l] keyword[for] identifier[l] keyword[in] identifier[log_lam_opt] ]),
identifier[scatter_opt] ))
identifier[self] . identifier[lam] [ identifier[b] ]= literal[int] ** identifier[log_lam_opt]
identifier[bs] = identifier[np] . identifier[arange] ( identifier[len] ( identifier[self] . identifier[breakpoints] ))
identifier[color] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[self] . identifier[pld_order] ):
identifier[ax] [ literal[int] ]. identifier[plot] ( identifier[bs] + literal[int] ,[ identifier[np] . identifier[log10] ( identifier[self] . identifier[lam] [ identifier[b] ][ identifier[n] ])
keyword[for] identifier[b] keyword[in] identifier[bs] ], literal[string] , identifier[color] = identifier[color] [ identifier[n] ])
identifier[ax] [ literal[int] ]. identifier[plot] ( identifier[bs] + literal[int] ,[ identifier[np] . identifier[log10] ( identifier[self] . identifier[lam] [ identifier[b] ][ identifier[n] ])
keyword[for] identifier[b] keyword[in] identifier[bs] ], literal[string] , identifier[color] = identifier[color] [ identifier[n] ], identifier[alpha] = literal[int] )
identifier[ax] [ literal[int] ]. identifier[set_ylabel] ( literal[string] , identifier[fontsize] = literal[int] )
identifier[ax] [ literal[int] ]. identifier[margins] ( literal[int] , literal[int] )
identifier[ax] [ literal[int] ]. identifier[set_xticks] ( identifier[np] . identifier[arange] ( literal[int] , identifier[len] ( identifier[self] . identifier[breakpoints] )+ literal[int] ))
identifier[ax] [ literal[int] ]. identifier[set_xticklabels] ([])
identifier[cdpp_arr] = identifier[self] . identifier[get_cdpp_arr] ()
identifier[ax] [ literal[int] ]. identifier[plot] ( identifier[bs] + literal[int] , identifier[cdpp_arr] , literal[string] )
identifier[ax] [ literal[int] ]. identifier[plot] ( identifier[bs] + literal[int] , identifier[cdpp_arr] , literal[string] , identifier[alpha] = literal[int] )
identifier[ax] [ literal[int] ]. identifier[margins] ( literal[int] , literal[int] )
identifier[ax] [ literal[int] ]. identifier[set_ylabel] ( literal[string] , identifier[fontsize] = literal[int] )
identifier[ax] [ literal[int] ]. identifier[set_xlabel] ( literal[string] , identifier[fontsize] = literal[int] )
identifier[ax] [ literal[int] ]. identifier[set_xticks] ( identifier[np] . identifier[arange] ( literal[int] , identifier[len] ( identifier[self] . identifier[breakpoints] )+ literal[int] ))
|
def cross_validate(self, ax):
"""
Performs the cross-validation step.
"""
# The CDPP to beat
cdpp_opt = self.get_cdpp_arr()
# Loop over all chunks
for (b, brkpt) in enumerate(self.breakpoints):
log.info('Cross-validating chunk %d/%d...' % (b + 1, len(self.breakpoints)))
# Mask for current chunk
m = self.get_masked_chunk(b)
# Mask transits and outliers
time = self.time[m]
flux = self.fraw[m]
ferr = self.fraw_err[m]
med = np.nanmedian(self.fraw)
# Setup the GP
gp = GP(self.kernel, self.kernel_params, white=False)
gp.compute(time, ferr)
# The masks
masks = list(Chunks(np.arange(0, len(time)), len(time) // self.cdivs))
# The pre-computed matrices
pre_v = [self.cv_precompute(mask, b) for mask in masks]
# Initialize with the nPLD solution
log_lam_opt = np.log10(self.lam[b])
scatter_opt = self.validation_scatter(log_lam_opt, b, masks, pre_v, gp, flux, time, med)
log.info('Iter 0/%d: ' % self.piter + 'logL = (%s), s = %.3f' % (', '.join(['%.3f' % l for l in log_lam_opt]), scatter_opt))
# Do `piter` iterations
for p in range(self.piter):
# Perturb the initial condition a bit
log_lam = np.array(np.log10(self.lam[b])) * (1 + self.ppert * np.random.randn(len(self.lam[b])))
scatter = self.validation_scatter(log_lam, b, masks, pre_v, gp, flux, time, med)
log.info('Initializing at: ' + 'logL = (%s), s = %.3f' % (', '.join(['%.3f' % l for l in log_lam]), scatter))
# Call the minimizer
(log_lam, scatter, _, _, _, _) = fmin_powell(self.validation_scatter, log_lam, args=(b, masks, pre_v, gp, flux, time, med), maxfun=self.pmaxf, disp=False, full_output=True)
# Did it improve the CDPP?
tmp = np.array(self.lam[b])
self.lam[b] = 10 ** log_lam
self.compute()
cdpp = self.get_cdpp_arr()[b]
self.lam[b] = tmp
if cdpp < cdpp_opt[b]:
cdpp_opt[b] = cdpp
log_lam_opt = log_lam # depends on [control=['if'], data=['cdpp']]
# Log it
log.info('Iter %d/%d: ' % (p + 1, self.piter) + 'logL = (%s), s = %.3f' % (', '.join(['%.3f' % l for l in log_lam]), scatter)) # depends on [control=['for'], data=['p']]
# The best solution
log.info('Found minimum: logL = (%s), s = %.3f' % (', '.join(['%.3f' % l for l in log_lam_opt]), scatter_opt))
self.lam[b] = 10 ** log_lam_opt # depends on [control=['for'], data=[]]
# We're just going to plot lambda as a function of chunk number
bs = np.arange(len(self.breakpoints))
color = ['k', 'b', 'r', 'g', 'y']
for n in range(self.pld_order):
ax[0].plot(bs + 1, [np.log10(self.lam[b][n]) for b in bs], '.', color=color[n])
ax[0].plot(bs + 1, [np.log10(self.lam[b][n]) for b in bs], '-', color=color[n], alpha=0.25) # depends on [control=['for'], data=['n']]
ax[0].set_ylabel('$\\log\\Lambda$', fontsize=5)
ax[0].margins(0.1, 0.1)
ax[0].set_xticks(np.arange(1, len(self.breakpoints) + 1))
ax[0].set_xticklabels([])
# Now plot the CDPP
cdpp_arr = self.get_cdpp_arr()
ax[1].plot(bs + 1, cdpp_arr, 'b.')
ax[1].plot(bs + 1, cdpp_arr, 'b-', alpha=0.25)
ax[1].margins(0.1, 0.1)
ax[1].set_ylabel('Scatter (ppm)', fontsize=5)
ax[1].set_xlabel('Chunk', fontsize=5)
ax[1].set_xticks(np.arange(1, len(self.breakpoints) + 1))
|
def auto_display_limits(self):
"""Calculate best display limits and set them."""
display_data_and_metadata = self.get_calculated_display_values(True).display_data_and_metadata
data = display_data_and_metadata.data if display_data_and_metadata else None
if data is not None:
# The old algorithm was a problem during EELS where the signal data
# is a small percentage of the overall data and was falling outside
# the included range. This is the new simplified algorithm. Future
# feature may allow user to select more complex algorithms.
mn, mx = numpy.nanmin(data), numpy.nanmax(data)
self.display_limits = mn, mx
|
def function[auto_display_limits, parameter[self]]:
constant[Calculate best display limits and set them.]
variable[display_data_and_metadata] assign[=] call[name[self].get_calculated_display_values, parameter[constant[True]]].display_data_and_metadata
variable[data] assign[=] <ast.IfExp object at 0x7da1b0ec6ec0>
if compare[name[data] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b0ec7220> assign[=] tuple[[<ast.Call object at 0x7da1b0ec5ae0>, <ast.Call object at 0x7da1b0ec72e0>]]
name[self].display_limits assign[=] tuple[[<ast.Name object at 0x7da1b0ec43a0>, <ast.Name object at 0x7da1b0ec5480>]]
|
keyword[def] identifier[auto_display_limits] ( identifier[self] ):
literal[string]
identifier[display_data_and_metadata] = identifier[self] . identifier[get_calculated_display_values] ( keyword[True] ). identifier[display_data_and_metadata]
identifier[data] = identifier[display_data_and_metadata] . identifier[data] keyword[if] identifier[display_data_and_metadata] keyword[else] keyword[None]
keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] :
identifier[mn] , identifier[mx] = identifier[numpy] . identifier[nanmin] ( identifier[data] ), identifier[numpy] . identifier[nanmax] ( identifier[data] )
identifier[self] . identifier[display_limits] = identifier[mn] , identifier[mx]
|
def auto_display_limits(self):
"""Calculate best display limits and set them."""
display_data_and_metadata = self.get_calculated_display_values(True).display_data_and_metadata
data = display_data_and_metadata.data if display_data_and_metadata else None
if data is not None:
# The old algorithm was a problem during EELS where the signal data
# is a small percentage of the overall data and was falling outside
# the included range. This is the new simplified algorithm. Future
# feature may allow user to select more complex algorithms.
(mn, mx) = (numpy.nanmin(data), numpy.nanmax(data))
self.display_limits = (mn, mx) # depends on [control=['if'], data=['data']]
|
def halt(self, checkpoint=None, finished=False, raise_error=True):
"""
Stop the pipeline before completion point.
:param str checkpoint: Name of stage just reached or just completed.
:param bool finished: Whether the indicated stage was just finished
(True), or just reached (False)
:param bool raise_error: Whether to raise an exception to truly
halt execution.
"""
self.stop_pipeline(PAUSE_FLAG)
self._active = False
if raise_error:
raise PipelineHalt(checkpoint, finished)
|
def function[halt, parameter[self, checkpoint, finished, raise_error]]:
constant[
Stop the pipeline before completion point.
:param str checkpoint: Name of stage just reached or just completed.
:param bool finished: Whether the indicated stage was just finished
(True), or just reached (False)
:param bool raise_error: Whether to raise an exception to truly
halt execution.
]
call[name[self].stop_pipeline, parameter[name[PAUSE_FLAG]]]
name[self]._active assign[=] constant[False]
if name[raise_error] begin[:]
<ast.Raise object at 0x7da1b032b910>
|
keyword[def] identifier[halt] ( identifier[self] , identifier[checkpoint] = keyword[None] , identifier[finished] = keyword[False] , identifier[raise_error] = keyword[True] ):
literal[string]
identifier[self] . identifier[stop_pipeline] ( identifier[PAUSE_FLAG] )
identifier[self] . identifier[_active] = keyword[False]
keyword[if] identifier[raise_error] :
keyword[raise] identifier[PipelineHalt] ( identifier[checkpoint] , identifier[finished] )
|
def halt(self, checkpoint=None, finished=False, raise_error=True):
"""
Stop the pipeline before completion point.
:param str checkpoint: Name of stage just reached or just completed.
:param bool finished: Whether the indicated stage was just finished
(True), or just reached (False)
:param bool raise_error: Whether to raise an exception to truly
halt execution.
"""
self.stop_pipeline(PAUSE_FLAG)
self._active = False
if raise_error:
raise PipelineHalt(checkpoint, finished) # depends on [control=['if'], data=[]]
|
def _check_create_parameters(self, **kwargs):
"""Override method for one in resource.py to check partition
The partition cannot be included as a parameter to create a guest.
Raise an exception if a consumer gives the partition parameter.
:raises: DisallowedCreationParameter
"""
if 'partition' in kwargs:
msg = "'partition' is not allowed as a create parameter. Vcmp " \
"guests are created with the 'name' at least."
raise DisallowedCreationParameter(msg)
super(Guest, self)._check_create_parameters(**kwargs)
|
def function[_check_create_parameters, parameter[self]]:
constant[Override method for one in resource.py to check partition
The partition cannot be included as a parameter to create a guest.
Raise an exception if a consumer gives the partition parameter.
:raises: DisallowedCreationParameter
]
if compare[constant[partition] in name[kwargs]] begin[:]
variable[msg] assign[=] constant['partition' is not allowed as a create parameter. Vcmp guests are created with the 'name' at least.]
<ast.Raise object at 0x7da204347c70>
call[call[name[super], parameter[name[Guest], name[self]]]._check_create_parameters, parameter[]]
|
keyword[def] identifier[_check_create_parameters] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[msg] = literal[string] literal[string]
keyword[raise] identifier[DisallowedCreationParameter] ( identifier[msg] )
identifier[super] ( identifier[Guest] , identifier[self] ). identifier[_check_create_parameters] (** identifier[kwargs] )
|
def _check_create_parameters(self, **kwargs):
"""Override method for one in resource.py to check partition
The partition cannot be included as a parameter to create a guest.
Raise an exception if a consumer gives the partition parameter.
:raises: DisallowedCreationParameter
"""
if 'partition' in kwargs:
msg = "'partition' is not allowed as a create parameter. Vcmp guests are created with the 'name' at least."
raise DisallowedCreationParameter(msg) # depends on [control=['if'], data=[]]
super(Guest, self)._check_create_parameters(**kwargs)
|
def _connection_parameters(self):
"""Return connection parameters for a pika connection.
:rtype: pika.ConnectionParameters
"""
return pika.ConnectionParameters(
self.config.get('host', 'localhost'),
self.config.get('port', 5672),
self.config.get('vhost', '/'),
pika.PlainCredentials(
self.config.get('user', 'guest'),
self.config.get('password', self.config.get('pass', 'guest'))),
ssl=self.config.get('ssl', False),
frame_max=self.config.get('frame_max', spec.FRAME_MAX_SIZE),
socket_timeout=self.config.get('socket_timeout', 10),
heartbeat_interval=self.config.get(
'heartbeat_interval', self.HB_INTERVAL))
|
def function[_connection_parameters, parameter[self]]:
constant[Return connection parameters for a pika connection.
:rtype: pika.ConnectionParameters
]
return[call[name[pika].ConnectionParameters, parameter[call[name[self].config.get, parameter[constant[host], constant[localhost]]], call[name[self].config.get, parameter[constant[port], constant[5672]]], call[name[self].config.get, parameter[constant[vhost], constant[/]]], call[name[pika].PlainCredentials, parameter[call[name[self].config.get, parameter[constant[user], constant[guest]]], call[name[self].config.get, parameter[constant[password], call[name[self].config.get, parameter[constant[pass], constant[guest]]]]]]]]]]
|
keyword[def] identifier[_connection_parameters] ( identifier[self] ):
literal[string]
keyword[return] identifier[pika] . identifier[ConnectionParameters] (
identifier[self] . identifier[config] . identifier[get] ( literal[string] , literal[string] ),
identifier[self] . identifier[config] . identifier[get] ( literal[string] , literal[int] ),
identifier[self] . identifier[config] . identifier[get] ( literal[string] , literal[string] ),
identifier[pika] . identifier[PlainCredentials] (
identifier[self] . identifier[config] . identifier[get] ( literal[string] , literal[string] ),
identifier[self] . identifier[config] . identifier[get] ( literal[string] , identifier[self] . identifier[config] . identifier[get] ( literal[string] , literal[string] ))),
identifier[ssl] = identifier[self] . identifier[config] . identifier[get] ( literal[string] , keyword[False] ),
identifier[frame_max] = identifier[self] . identifier[config] . identifier[get] ( literal[string] , identifier[spec] . identifier[FRAME_MAX_SIZE] ),
identifier[socket_timeout] = identifier[self] . identifier[config] . identifier[get] ( literal[string] , literal[int] ),
identifier[heartbeat_interval] = identifier[self] . identifier[config] . identifier[get] (
literal[string] , identifier[self] . identifier[HB_INTERVAL] ))
|
def _connection_parameters(self):
"""Return connection parameters for a pika connection.
:rtype: pika.ConnectionParameters
"""
return pika.ConnectionParameters(self.config.get('host', 'localhost'), self.config.get('port', 5672), self.config.get('vhost', '/'), pika.PlainCredentials(self.config.get('user', 'guest'), self.config.get('password', self.config.get('pass', 'guest'))), ssl=self.config.get('ssl', False), frame_max=self.config.get('frame_max', spec.FRAME_MAX_SIZE), socket_timeout=self.config.get('socket_timeout', 10), heartbeat_interval=self.config.get('heartbeat_interval', self.HB_INTERVAL))
|
def dot_product(t1, t2, keep_dims=False, name=None, reduction_dim=None):
"""Computes the dot product of t1 and t2.
Args:
t1: A rank 2 tensor.
t2: A tensor that is the same size as t1.
keep_dims: If true, reduction does not change the rank of the input.
name: Optional name for this op.
reduction_dim: The dimension to reduce, by default choose the last one
and if no shape is specified guess 1.
Returns:
The dot product.
"""
with tf.name_scope(name, 'dot', [t1, t2]) as scope:
t1 = tf.convert_to_tensor(t1, name='t1')
t2 = tf.convert_to_tensor(t2, name='t2')
mul = tf.multiply(t1, t2)
if not reduction_dim:
reduction_dim = _last_index(mul, 1)
return tf.reduce_sum(mul, reduction_dim, name=scope, keep_dims=keep_dims)
|
def function[dot_product, parameter[t1, t2, keep_dims, name, reduction_dim]]:
constant[Computes the dot product of t1 and t2.
Args:
t1: A rank 2 tensor.
t2: A tensor that is the same size as t1.
keep_dims: If true, reduction does not change the rank of the input.
name: Optional name for this op.
reduction_dim: The dimension to reduce, by default choose the last one
and if no shape is specified guess 1.
Returns:
The dot product.
]
with call[name[tf].name_scope, parameter[name[name], constant[dot], list[[<ast.Name object at 0x7da18dc04760>, <ast.Name object at 0x7da18dc05f30>]]]] begin[:]
variable[t1] assign[=] call[name[tf].convert_to_tensor, parameter[name[t1]]]
variable[t2] assign[=] call[name[tf].convert_to_tensor, parameter[name[t2]]]
variable[mul] assign[=] call[name[tf].multiply, parameter[name[t1], name[t2]]]
if <ast.UnaryOp object at 0x7da18dc05e10> begin[:]
variable[reduction_dim] assign[=] call[name[_last_index], parameter[name[mul], constant[1]]]
return[call[name[tf].reduce_sum, parameter[name[mul], name[reduction_dim]]]]
|
keyword[def] identifier[dot_product] ( identifier[t1] , identifier[t2] , identifier[keep_dims] = keyword[False] , identifier[name] = keyword[None] , identifier[reduction_dim] = keyword[None] ):
literal[string]
keyword[with] identifier[tf] . identifier[name_scope] ( identifier[name] , literal[string] ,[ identifier[t1] , identifier[t2] ]) keyword[as] identifier[scope] :
identifier[t1] = identifier[tf] . identifier[convert_to_tensor] ( identifier[t1] , identifier[name] = literal[string] )
identifier[t2] = identifier[tf] . identifier[convert_to_tensor] ( identifier[t2] , identifier[name] = literal[string] )
identifier[mul] = identifier[tf] . identifier[multiply] ( identifier[t1] , identifier[t2] )
keyword[if] keyword[not] identifier[reduction_dim] :
identifier[reduction_dim] = identifier[_last_index] ( identifier[mul] , literal[int] )
keyword[return] identifier[tf] . identifier[reduce_sum] ( identifier[mul] , identifier[reduction_dim] , identifier[name] = identifier[scope] , identifier[keep_dims] = identifier[keep_dims] )
|
def dot_product(t1, t2, keep_dims=False, name=None, reduction_dim=None):
"""Computes the dot product of t1 and t2.
Args:
t1: A rank 2 tensor.
t2: A tensor that is the same size as t1.
keep_dims: If true, reduction does not change the rank of the input.
name: Optional name for this op.
reduction_dim: The dimension to reduce, by default choose the last one
and if no shape is specified guess 1.
Returns:
The dot product.
"""
with tf.name_scope(name, 'dot', [t1, t2]) as scope:
t1 = tf.convert_to_tensor(t1, name='t1')
t2 = tf.convert_to_tensor(t2, name='t2')
mul = tf.multiply(t1, t2)
if not reduction_dim:
reduction_dim = _last_index(mul, 1) # depends on [control=['if'], data=[]]
return tf.reduce_sum(mul, reduction_dim, name=scope, keep_dims=keep_dims) # depends on [control=['with'], data=['scope']]
|
def _clean_path(path):
"""Create a fully fissile absolute system path with no symbolic links and environment variables"""
path = path.replace('"', '')
path = path.replace("'", '')
# Replace ~ with /home/user
path = os.path.expanduser(path)
# Replace environment variables
path = os.path.expandvars(path)
# If the path is relative, assume it is relative to the config file directory
if not os.path.isabs(path):
path = os.path.join(config.global_config.path, path)
# Clean path, e.g. replace /./ with /
path = os.path.abspath(path)
# Eliminate symbolic links
path = os.path.realpath(path)
return path
|
def function[_clean_path, parameter[path]]:
constant[Create a fully fissile absolute system path with no symbolic links and environment variables]
variable[path] assign[=] call[name[path].replace, parameter[constant["], constant[]]]
variable[path] assign[=] call[name[path].replace, parameter[constant['], constant[]]]
variable[path] assign[=] call[name[os].path.expanduser, parameter[name[path]]]
variable[path] assign[=] call[name[os].path.expandvars, parameter[name[path]]]
if <ast.UnaryOp object at 0x7da2041db2e0> begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[name[config].global_config.path, name[path]]]
variable[path] assign[=] call[name[os].path.abspath, parameter[name[path]]]
variable[path] assign[=] call[name[os].path.realpath, parameter[name[path]]]
return[name[path]]
|
keyword[def] identifier[_clean_path] ( identifier[path] ):
literal[string]
identifier[path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] )
identifier[path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] )
identifier[path] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[path] )
identifier[path] = identifier[os] . identifier[path] . identifier[expandvars] ( identifier[path] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[path] ):
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[config] . identifier[global_config] . identifier[path] , identifier[path] )
identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] )
identifier[path] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[path] )
keyword[return] identifier[path]
|
def _clean_path(path):
"""Create a fully fissile absolute system path with no symbolic links and environment variables"""
path = path.replace('"', '')
path = path.replace("'", '')
# Replace ~ with /home/user
path = os.path.expanduser(path)
# Replace environment variables
path = os.path.expandvars(path)
# If the path is relative, assume it is relative to the config file directory
if not os.path.isabs(path):
path = os.path.join(config.global_config.path, path) # depends on [control=['if'], data=[]]
# Clean path, e.g. replace /./ with /
path = os.path.abspath(path)
# Eliminate symbolic links
path = os.path.realpath(path)
return path
|
def copy_snapshot(kwargs=None, call=None):
'''
Copy a snapshot
'''
if call != 'function':
log.error(
'The copy_snapshot function must be called with -f or --function.'
)
return False
if 'source_region' not in kwargs:
log.error('A source_region must be specified to copy a snapshot.')
return False
if 'source_snapshot_id' not in kwargs:
log.error('A source_snapshot_id must be specified to copy a snapshot.')
return False
if 'description' not in kwargs:
kwargs['description'] = ''
params = {'Action': 'CopySnapshot'}
if 'source_region' in kwargs:
params['SourceRegion'] = kwargs['source_region']
if 'source_snapshot_id' in kwargs:
params['SourceSnapshotId'] = kwargs['source_snapshot_id']
if 'description' in kwargs:
params['Description'] = kwargs['description']
log.debug(params)
data = aws.query(params,
return_url=True,
location=get_location(),
provider=get_provider(),
opts=__opts__,
sigver='4')
return data
|
def function[copy_snapshot, parameter[kwargs, call]]:
constant[
Copy a snapshot
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
call[name[log].error, parameter[constant[The copy_snapshot function must be called with -f or --function.]]]
return[constant[False]]
if compare[constant[source_region] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[log].error, parameter[constant[A source_region must be specified to copy a snapshot.]]]
return[constant[False]]
if compare[constant[source_snapshot_id] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[log].error, parameter[constant[A source_snapshot_id must be specified to copy a snapshot.]]]
return[constant[False]]
if compare[constant[description] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[kwargs]][constant[description]] assign[=] constant[]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b2345090>], [<ast.Constant object at 0x7da1b2347ac0>]]
if compare[constant[source_region] in name[kwargs]] begin[:]
call[name[params]][constant[SourceRegion]] assign[=] call[name[kwargs]][constant[source_region]]
if compare[constant[source_snapshot_id] in name[kwargs]] begin[:]
call[name[params]][constant[SourceSnapshotId]] assign[=] call[name[kwargs]][constant[source_snapshot_id]]
if compare[constant[description] in name[kwargs]] begin[:]
call[name[params]][constant[Description]] assign[=] call[name[kwargs]][constant[description]]
call[name[log].debug, parameter[name[params]]]
variable[data] assign[=] call[name[aws].query, parameter[name[params]]]
return[name[data]]
|
keyword[def] identifier[copy_snapshot] ( identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
identifier[log] . identifier[error] (
literal[string]
)
keyword[return] keyword[False]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[kwargs] [ literal[string] ]= literal[string]
identifier[params] ={ literal[string] : literal[string] }
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[params] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[params] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[params] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
identifier[log] . identifier[debug] ( identifier[params] )
identifier[data] = identifier[aws] . identifier[query] ( identifier[params] ,
identifier[return_url] = keyword[True] ,
identifier[location] = identifier[get_location] (),
identifier[provider] = identifier[get_provider] (),
identifier[opts] = identifier[__opts__] ,
identifier[sigver] = literal[string] )
keyword[return] identifier[data]
|
def copy_snapshot(kwargs=None, call=None):
"""
Copy a snapshot
"""
if call != 'function':
log.error('The copy_snapshot function must be called with -f or --function.')
return False # depends on [control=['if'], data=[]]
if 'source_region' not in kwargs:
log.error('A source_region must be specified to copy a snapshot.')
return False # depends on [control=['if'], data=[]]
if 'source_snapshot_id' not in kwargs:
log.error('A source_snapshot_id must be specified to copy a snapshot.')
return False # depends on [control=['if'], data=[]]
if 'description' not in kwargs:
kwargs['description'] = '' # depends on [control=['if'], data=['kwargs']]
params = {'Action': 'CopySnapshot'}
if 'source_region' in kwargs:
params['SourceRegion'] = kwargs['source_region'] # depends on [control=['if'], data=['kwargs']]
if 'source_snapshot_id' in kwargs:
params['SourceSnapshotId'] = kwargs['source_snapshot_id'] # depends on [control=['if'], data=['kwargs']]
if 'description' in kwargs:
params['Description'] = kwargs['description'] # depends on [control=['if'], data=['kwargs']]
log.debug(params)
data = aws.query(params, return_url=True, location=get_location(), provider=get_provider(), opts=__opts__, sigver='4')
return data
|
def WriteClientGraphSeries(self, graph_series,
client_label,
timestamp):
"""See db.Database."""
series_key = (client_label, graph_series.report_type, timestamp.Copy())
self.client_graph_series[series_key] = graph_series.Copy()
|
def function[WriteClientGraphSeries, parameter[self, graph_series, client_label, timestamp]]:
constant[See db.Database.]
variable[series_key] assign[=] tuple[[<ast.Name object at 0x7da1b1b6e0b0>, <ast.Attribute object at 0x7da1b1b6f490>, <ast.Call object at 0x7da1b1b6c970>]]
call[name[self].client_graph_series][name[series_key]] assign[=] call[name[graph_series].Copy, parameter[]]
|
keyword[def] identifier[WriteClientGraphSeries] ( identifier[self] , identifier[graph_series] ,
identifier[client_label] ,
identifier[timestamp] ):
literal[string]
identifier[series_key] =( identifier[client_label] , identifier[graph_series] . identifier[report_type] , identifier[timestamp] . identifier[Copy] ())
identifier[self] . identifier[client_graph_series] [ identifier[series_key] ]= identifier[graph_series] . identifier[Copy] ()
|
def WriteClientGraphSeries(self, graph_series, client_label, timestamp):
"""See db.Database."""
series_key = (client_label, graph_series.report_type, timestamp.Copy())
self.client_graph_series[series_key] = graph_series.Copy()
|
def Lombardi_Pedrocchi(m, x, rhol, rhog, sigma, D, L=1):
r'''Calculates two-phase pressure drop with the Lombardi-Pedrocchi (1972)
correlation from [1]_ as shown in [2]_ and [3]_.
.. math::
\Delta P_{tp} = \frac{0.83 G_{tp}^{1.4} \sigma^{0.4} L}{D^{1.2}
\rho_{h}^{0.866}}
Parameters
----------
m : float
Mass flow rate of fluid, [kg/s]
x : float
Quality of fluid, [-]
rhol : float
Liquid density, [kg/m^3]
rhog : float
Gas density, [kg/m^3]
sigma : float
Surface tension, [N/m]
D : float
Diameter of pipe, [m]
L : float, optional
Length of pipe, [m]
Returns
-------
dP : float
Pressure drop of the two-phase flow, [Pa]
Notes
-----
This is a purely empirical method. [3]_ presents a review of this and other
correlations. It did not perform best, but there were also correlations
worse than it.
Examples
--------
>>> Lombardi_Pedrocchi(m=0.6, x=0.1, rhol=915., rhog=2.67, sigma=0.045,
... D=0.05, L=1)
1567.328374498781
References
----------
.. [1] Lombardi, C., and E. Pedrocchi. "Pressure Drop Correlation in Two-
Phase Flow." Energ. Nucl. (Milan) 19: No. 2, 91-99, January 1, 1972.
.. [2] Mekisso, Henock Mateos. "Comparison of Frictional Pressure Drop
Correlations for Isothermal Two-Phase Horizontal Flow." Thesis, Oklahoma
State University, 2013. https://shareok.org/handle/11244/11109.
.. [3] Turgut, Oğuz Emrah, Mustafa Turhan Çoban, and Mustafa Asker.
"Comparison of Flow Boiling Pressure Drop Correlations for Smooth
Macrotubes." Heat Transfer Engineering 37, no. 6 (April 12, 2016):
487-506. doi:10.1080/01457632.2015.1060733.
'''
voidage_h = homogeneous(x, rhol, rhog)
rho_h = rhol*(1-voidage_h) + rhog*voidage_h
G_tp = m/(pi/4*D**2)
return 0.83*G_tp**1.4*sigma**0.4*L/(D**1.2*rho_h**0.866)
|
def function[Lombardi_Pedrocchi, parameter[m, x, rhol, rhog, sigma, D, L]]:
constant[Calculates two-phase pressure drop with the Lombardi-Pedrocchi (1972)
correlation from [1]_ as shown in [2]_ and [3]_.
.. math::
\Delta P_{tp} = \frac{0.83 G_{tp}^{1.4} \sigma^{0.4} L}{D^{1.2}
\rho_{h}^{0.866}}
Parameters
----------
m : float
Mass flow rate of fluid, [kg/s]
x : float
Quality of fluid, [-]
rhol : float
Liquid density, [kg/m^3]
rhog : float
Gas density, [kg/m^3]
sigma : float
Surface tension, [N/m]
D : float
Diameter of pipe, [m]
L : float, optional
Length of pipe, [m]
Returns
-------
dP : float
Pressure drop of the two-phase flow, [Pa]
Notes
-----
This is a purely empirical method. [3]_ presents a review of this and other
correlations. It did not perform best, but there were also correlations
worse than it.
Examples
--------
>>> Lombardi_Pedrocchi(m=0.6, x=0.1, rhol=915., rhog=2.67, sigma=0.045,
... D=0.05, L=1)
1567.328374498781
References
----------
.. [1] Lombardi, C., and E. Pedrocchi. "Pressure Drop Correlation in Two-
Phase Flow." Energ. Nucl. (Milan) 19: No. 2, 91-99, January 1, 1972.
.. [2] Mekisso, Henock Mateos. "Comparison of Frictional Pressure Drop
Correlations for Isothermal Two-Phase Horizontal Flow." Thesis, Oklahoma
State University, 2013. https://shareok.org/handle/11244/11109.
.. [3] Turgut, Oğuz Emrah, Mustafa Turhan Çoban, and Mustafa Asker.
"Comparison of Flow Boiling Pressure Drop Correlations for Smooth
Macrotubes." Heat Transfer Engineering 37, no. 6 (April 12, 2016):
487-506. doi:10.1080/01457632.2015.1060733.
]
variable[voidage_h] assign[=] call[name[homogeneous], parameter[name[x], name[rhol], name[rhog]]]
variable[rho_h] assign[=] binary_operation[binary_operation[name[rhol] * binary_operation[constant[1] - name[voidage_h]]] + binary_operation[name[rhog] * name[voidage_h]]]
variable[G_tp] assign[=] binary_operation[name[m] / binary_operation[binary_operation[name[pi] / constant[4]] * binary_operation[name[D] ** constant[2]]]]
return[binary_operation[binary_operation[binary_operation[binary_operation[constant[0.83] * binary_operation[name[G_tp] ** constant[1.4]]] * binary_operation[name[sigma] ** constant[0.4]]] * name[L]] / binary_operation[binary_operation[name[D] ** constant[1.2]] * binary_operation[name[rho_h] ** constant[0.866]]]]]
|
keyword[def] identifier[Lombardi_Pedrocchi] ( identifier[m] , identifier[x] , identifier[rhol] , identifier[rhog] , identifier[sigma] , identifier[D] , identifier[L] = literal[int] ):
literal[string]
identifier[voidage_h] = identifier[homogeneous] ( identifier[x] , identifier[rhol] , identifier[rhog] )
identifier[rho_h] = identifier[rhol] *( literal[int] - identifier[voidage_h] )+ identifier[rhog] * identifier[voidage_h]
identifier[G_tp] = identifier[m] /( identifier[pi] / literal[int] * identifier[D] ** literal[int] )
keyword[return] literal[int] * identifier[G_tp] ** literal[int] * identifier[sigma] ** literal[int] * identifier[L] /( identifier[D] ** literal[int] * identifier[rho_h] ** literal[int] )
|
def Lombardi_Pedrocchi(m, x, rhol, rhog, sigma, D, L=1):
"""Calculates two-phase pressure drop with the Lombardi-Pedrocchi (1972)
correlation from [1]_ as shown in [2]_ and [3]_.
.. math::
\\Delta P_{tp} = \\frac{0.83 G_{tp}^{1.4} \\sigma^{0.4} L}{D^{1.2}
\\rho_{h}^{0.866}}
Parameters
----------
m : float
Mass flow rate of fluid, [kg/s]
x : float
Quality of fluid, [-]
rhol : float
Liquid density, [kg/m^3]
rhog : float
Gas density, [kg/m^3]
sigma : float
Surface tension, [N/m]
D : float
Diameter of pipe, [m]
L : float, optional
Length of pipe, [m]
Returns
-------
dP : float
Pressure drop of the two-phase flow, [Pa]
Notes
-----
This is a purely empirical method. [3]_ presents a review of this and other
correlations. It did not perform best, but there were also correlations
worse than it.
Examples
--------
>>> Lombardi_Pedrocchi(m=0.6, x=0.1, rhol=915., rhog=2.67, sigma=0.045,
... D=0.05, L=1)
1567.328374498781
References
----------
.. [1] Lombardi, C., and E. Pedrocchi. "Pressure Drop Correlation in Two-
Phase Flow." Energ. Nucl. (Milan) 19: No. 2, 91-99, January 1, 1972.
.. [2] Mekisso, Henock Mateos. "Comparison of Frictional Pressure Drop
Correlations for Isothermal Two-Phase Horizontal Flow." Thesis, Oklahoma
State University, 2013. https://shareok.org/handle/11244/11109.
.. [3] Turgut, Oğuz Emrah, Mustafa Turhan Çoban, and Mustafa Asker.
"Comparison of Flow Boiling Pressure Drop Correlations for Smooth
Macrotubes." Heat Transfer Engineering 37, no. 6 (April 12, 2016):
487-506. doi:10.1080/01457632.2015.1060733.
"""
voidage_h = homogeneous(x, rhol, rhog)
rho_h = rhol * (1 - voidage_h) + rhog * voidage_h
G_tp = m / (pi / 4 * D ** 2)
return 0.83 * G_tp ** 1.4 * sigma ** 0.4 * L / (D ** 1.2 * rho_h ** 0.866)
|
def com_find2D(ar_grid, **kwargs):
"""
ARGS
**kwargs
ordering = 'rc' or 'xy' order the return either in (x,y)
or (row, col) order.
indexing = 'zero' or 'one' return positions relative to zero (i.e.
python addressing) or one (i.e. MatLAB
addressing)
DESC
Find the center of mass in 2D array grid <ar_grid>. Mass elements
are grid index values.
By using python idioms, his version is MUCH faster than the com_find()
"""
b_reorder = True
b_oneOffset = True
for key, value in kwargs.iteritems():
if key == 'ordering' and value == 'rc': b_reorder = False
if key == 'ordering' and value == 'xy': b_reorder = True
if key == 'indexing' and value == 'zero': b_oneOffset = False
if key == 'indexing' and value == 'one': b_oneOffset = True
f_Smass = ar_grid.sum()
f_comX = (ar_grid[nonzero(ar_grid)] * (nonzero(ar_grid)[1] + 1)).sum() / f_Smass
f_comY = (ar_grid[nonzero(ar_grid)] * (nonzero(ar_grid)[0] + 1)).sum() / f_Smass
if b_reorder: ar_ret = array( (f_comX, f_comY) )
if not b_reorder: ar_ret = array( (f_comY, f_comX) )
if not b_oneOffset: ar_ret -= 1.0
return ar_ret
|
def function[com_find2D, parameter[ar_grid]]:
constant[
ARGS
**kwargs
ordering = 'rc' or 'xy' order the return either in (x,y)
or (row, col) order.
indexing = 'zero' or 'one' return positions relative to zero (i.e.
python addressing) or one (i.e. MatLAB
addressing)
DESC
Find the center of mass in 2D array grid <ar_grid>. Mass elements
are grid index values.
By using python idioms, his version is MUCH faster than the com_find()
]
variable[b_reorder] assign[=] constant[True]
variable[b_oneOffset] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da1b0845d50>, <ast.Name object at 0x7da1b0847880>]]] in starred[call[name[kwargs].iteritems, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b0845030> begin[:]
variable[b_reorder] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b0844220> begin[:]
variable[b_reorder] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b0846a10> begin[:]
variable[b_oneOffset] assign[=] constant[False]
if <ast.BoolOp object at 0x7da1b0847370> begin[:]
variable[b_oneOffset] assign[=] constant[True]
variable[f_Smass] assign[=] call[name[ar_grid].sum, parameter[]]
variable[f_comX] assign[=] binary_operation[call[binary_operation[call[name[ar_grid]][call[name[nonzero], parameter[name[ar_grid]]]] * binary_operation[call[call[name[nonzero], parameter[name[ar_grid]]]][constant[1]] + constant[1]]].sum, parameter[]] / name[f_Smass]]
variable[f_comY] assign[=] binary_operation[call[binary_operation[call[name[ar_grid]][call[name[nonzero], parameter[name[ar_grid]]]] * binary_operation[call[call[name[nonzero], parameter[name[ar_grid]]]][constant[0]] + constant[1]]].sum, parameter[]] / name[f_Smass]]
if name[b_reorder] begin[:]
variable[ar_ret] assign[=] call[name[array], parameter[tuple[[<ast.Name object at 0x7da1b08471f0>, <ast.Name object at 0x7da1b0844160>]]]]
if <ast.UnaryOp object at 0x7da1b0846c80> begin[:]
variable[ar_ret] assign[=] call[name[array], parameter[tuple[[<ast.Name object at 0x7da1b0847ac0>, <ast.Name object at 0x7da1b0845660>]]]]
if <ast.UnaryOp object at 0x7da1b0847610> begin[:]
<ast.AugAssign object at 0x7da1b0847970>
return[name[ar_ret]]
|
keyword[def] identifier[com_find2D] ( identifier[ar_grid] ,** identifier[kwargs] ):
literal[string]
identifier[b_reorder] = keyword[True]
identifier[b_oneOffset] = keyword[True]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[kwargs] . identifier[iteritems] ():
keyword[if] identifier[key] == literal[string] keyword[and] identifier[value] == literal[string] : identifier[b_reorder] = keyword[False]
keyword[if] identifier[key] == literal[string] keyword[and] identifier[value] == literal[string] : identifier[b_reorder] = keyword[True]
keyword[if] identifier[key] == literal[string] keyword[and] identifier[value] == literal[string] : identifier[b_oneOffset] = keyword[False]
keyword[if] identifier[key] == literal[string] keyword[and] identifier[value] == literal[string] : identifier[b_oneOffset] = keyword[True]
identifier[f_Smass] = identifier[ar_grid] . identifier[sum] ()
identifier[f_comX] =( identifier[ar_grid] [ identifier[nonzero] ( identifier[ar_grid] )]*( identifier[nonzero] ( identifier[ar_grid] )[ literal[int] ]+ literal[int] )). identifier[sum] ()/ identifier[f_Smass]
identifier[f_comY] =( identifier[ar_grid] [ identifier[nonzero] ( identifier[ar_grid] )]*( identifier[nonzero] ( identifier[ar_grid] )[ literal[int] ]+ literal[int] )). identifier[sum] ()/ identifier[f_Smass]
keyword[if] identifier[b_reorder] : identifier[ar_ret] = identifier[array] (( identifier[f_comX] , identifier[f_comY] ))
keyword[if] keyword[not] identifier[b_reorder] : identifier[ar_ret] = identifier[array] (( identifier[f_comY] , identifier[f_comX] ))
keyword[if] keyword[not] identifier[b_oneOffset] : identifier[ar_ret] -= literal[int]
keyword[return] identifier[ar_ret]
|
def com_find2D(ar_grid, **kwargs):
"""
ARGS
**kwargs
ordering = 'rc' or 'xy' order the return either in (x,y)
or (row, col) order.
indexing = 'zero' or 'one' return positions relative to zero (i.e.
python addressing) or one (i.e. MatLAB
addressing)
DESC
Find the center of mass in 2D array grid <ar_grid>. Mass elements
are grid index values.
By using python idioms, his version is MUCH faster than the com_find()
"""
b_reorder = True
b_oneOffset = True
for (key, value) in kwargs.iteritems():
if key == 'ordering' and value == 'rc':
b_reorder = False # depends on [control=['if'], data=[]]
if key == 'ordering' and value == 'xy':
b_reorder = True # depends on [control=['if'], data=[]]
if key == 'indexing' and value == 'zero':
b_oneOffset = False # depends on [control=['if'], data=[]]
if key == 'indexing' and value == 'one':
b_oneOffset = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
f_Smass = ar_grid.sum()
f_comX = (ar_grid[nonzero(ar_grid)] * (nonzero(ar_grid)[1] + 1)).sum() / f_Smass
f_comY = (ar_grid[nonzero(ar_grid)] * (nonzero(ar_grid)[0] + 1)).sum() / f_Smass
if b_reorder:
ar_ret = array((f_comX, f_comY)) # depends on [control=['if'], data=[]]
if not b_reorder:
ar_ret = array((f_comY, f_comX)) # depends on [control=['if'], data=[]]
if not b_oneOffset:
ar_ret -= 1.0 # depends on [control=['if'], data=[]]
return ar_ret
|
def modify_account(self, account, attrs):
"""
:param account: a zobjects.Account
:param attrs : a dictionary of attributes to set ({key:value,...})
"""
attrs = [{'n': k, '_content': v} for k, v in attrs.items()]
self.request('ModifyAccount', {
'id': self._get_or_fetch_id(account, self.get_account),
'a': attrs
})
|
def function[modify_account, parameter[self, account, attrs]]:
constant[
:param account: a zobjects.Account
:param attrs : a dictionary of attributes to set ({key:value,...})
]
variable[attrs] assign[=] <ast.ListComp object at 0x7da18dc06e00>
call[name[self].request, parameter[constant[ModifyAccount], dictionary[[<ast.Constant object at 0x7da18dc05b10>, <ast.Constant object at 0x7da18dc069e0>], [<ast.Call object at 0x7da18dc04700>, <ast.Name object at 0x7da18ede5ae0>]]]]
|
keyword[def] identifier[modify_account] ( identifier[self] , identifier[account] , identifier[attrs] ):
literal[string]
identifier[attrs] =[{ literal[string] : identifier[k] , literal[string] : identifier[v] } keyword[for] identifier[k] , identifier[v] keyword[in] identifier[attrs] . identifier[items] ()]
identifier[self] . identifier[request] ( literal[string] ,{
literal[string] : identifier[self] . identifier[_get_or_fetch_id] ( identifier[account] , identifier[self] . identifier[get_account] ),
literal[string] : identifier[attrs]
})
|
def modify_account(self, account, attrs):
"""
:param account: a zobjects.Account
:param attrs : a dictionary of attributes to set ({key:value,...})
"""
attrs = [{'n': k, '_content': v} for (k, v) in attrs.items()]
self.request('ModifyAccount', {'id': self._get_or_fetch_id(account, self.get_account), 'a': attrs})
|
def encode_list(cls, value):
"""
Encodes a list *value* into a string via base64 encoding.
"""
encoded = base64.b64encode(six.b(" ".join(str(v) for v in value) or "-"))
return encoded.decode("utf-8") if six.PY3 else encoded
|
def function[encode_list, parameter[cls, value]]:
constant[
Encodes a list *value* into a string via base64 encoding.
]
variable[encoded] assign[=] call[name[base64].b64encode, parameter[call[name[six].b, parameter[<ast.BoolOp object at 0x7da1b05eda20>]]]]
return[<ast.IfExp object at 0x7da1b05ecc40>]
|
keyword[def] identifier[encode_list] ( identifier[cls] , identifier[value] ):
literal[string]
identifier[encoded] = identifier[base64] . identifier[b64encode] ( identifier[six] . identifier[b] ( literal[string] . identifier[join] ( identifier[str] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[value] ) keyword[or] literal[string] ))
keyword[return] identifier[encoded] . identifier[decode] ( literal[string] ) keyword[if] identifier[six] . identifier[PY3] keyword[else] identifier[encoded]
|
def encode_list(cls, value):
"""
Encodes a list *value* into a string via base64 encoding.
"""
encoded = base64.b64encode(six.b(' '.join((str(v) for v in value)) or '-'))
return encoded.decode('utf-8') if six.PY3 else encoded
|
def get_locus(sequences, kir=False, verbose=False, refdata=None, evalue=10):
"""
Gets the locus of the sequence by running blastn
:param sequences: sequenences to blast
:param kir: bool whether the sequences are KIR or not
:rtype: ``str``
Example usage:
>>> from Bio.Seq import Seq
>>> from seqann.blast_cmd import get_locus
>>> sequence = Seq('AGAGACTCTCCCGAGGATTTCGTGTACCAGTTTAAGGCCATGTGCTACTTCACC')
>>> locus = get_locus(sequence)
"""
if not refdata:
refdata = ReferenceData()
file_id = str(randomid())
input_fasta = file_id + ".fasta"
output_xml = file_id + ".xml"
SeqIO.write(sequences, input_fasta, "fasta")
blastn_cline = NcbiblastnCommandline(query=input_fasta,
db=refdata.blastdb,
evalue=evalue,
outfmt=5,
reward=1,
penalty=-3,
gapopen=5,
gapextend=2,
dust='yes',
out=output_xml)
stdout, stderr = blastn_cline()
blast_qresult = SearchIO.read(output_xml, 'blast-xml')
# Delete files
cleanup(file_id)
if len(blast_qresult.hits) == 0:
return ''
loci = []
for i in range(0, 3):
if kir:
loci.append(blast_qresult[i].id.split("*")[0])
else:
loci.append(blast_qresult[i].id.split("*")[0])
locus = set(loci)
if len(locus) == 1:
if has_hla(loci[0]) or kir:
return loci[0]
else:
return "HLA-" + loci[0]
else:
return ''
|
def function[get_locus, parameter[sequences, kir, verbose, refdata, evalue]]:
constant[
Gets the locus of the sequence by running blastn
:param sequences: sequenences to blast
:param kir: bool whether the sequences are KIR or not
:rtype: ``str``
Example usage:
>>> from Bio.Seq import Seq
>>> from seqann.blast_cmd import get_locus
>>> sequence = Seq('AGAGACTCTCCCGAGGATTTCGTGTACCAGTTTAAGGCCATGTGCTACTTCACC')
>>> locus = get_locus(sequence)
]
if <ast.UnaryOp object at 0x7da1b258a860> begin[:]
variable[refdata] assign[=] call[name[ReferenceData], parameter[]]
variable[file_id] assign[=] call[name[str], parameter[call[name[randomid], parameter[]]]]
variable[input_fasta] assign[=] binary_operation[name[file_id] + constant[.fasta]]
variable[output_xml] assign[=] binary_operation[name[file_id] + constant[.xml]]
call[name[SeqIO].write, parameter[name[sequences], name[input_fasta], constant[fasta]]]
variable[blastn_cline] assign[=] call[name[NcbiblastnCommandline], parameter[]]
<ast.Tuple object at 0x7da1b25ee4d0> assign[=] call[name[blastn_cline], parameter[]]
variable[blast_qresult] assign[=] call[name[SearchIO].read, parameter[name[output_xml], constant[blast-xml]]]
call[name[cleanup], parameter[name[file_id]]]
if compare[call[name[len], parameter[name[blast_qresult].hits]] equal[==] constant[0]] begin[:]
return[constant[]]
variable[loci] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[3]]]] begin[:]
if name[kir] begin[:]
call[name[loci].append, parameter[call[call[call[name[blast_qresult]][name[i]].id.split, parameter[constant[*]]]][constant[0]]]]
variable[locus] assign[=] call[name[set], parameter[name[loci]]]
if compare[call[name[len], parameter[name[locus]]] equal[==] constant[1]] begin[:]
if <ast.BoolOp object at 0x7da1b256f160> begin[:]
return[call[name[loci]][constant[0]]]
|
keyword[def] identifier[get_locus] ( identifier[sequences] , identifier[kir] = keyword[False] , identifier[verbose] = keyword[False] , identifier[refdata] = keyword[None] , identifier[evalue] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[refdata] :
identifier[refdata] = identifier[ReferenceData] ()
identifier[file_id] = identifier[str] ( identifier[randomid] ())
identifier[input_fasta] = identifier[file_id] + literal[string]
identifier[output_xml] = identifier[file_id] + literal[string]
identifier[SeqIO] . identifier[write] ( identifier[sequences] , identifier[input_fasta] , literal[string] )
identifier[blastn_cline] = identifier[NcbiblastnCommandline] ( identifier[query] = identifier[input_fasta] ,
identifier[db] = identifier[refdata] . identifier[blastdb] ,
identifier[evalue] = identifier[evalue] ,
identifier[outfmt] = literal[int] ,
identifier[reward] = literal[int] ,
identifier[penalty] =- literal[int] ,
identifier[gapopen] = literal[int] ,
identifier[gapextend] = literal[int] ,
identifier[dust] = literal[string] ,
identifier[out] = identifier[output_xml] )
identifier[stdout] , identifier[stderr] = identifier[blastn_cline] ()
identifier[blast_qresult] = identifier[SearchIO] . identifier[read] ( identifier[output_xml] , literal[string] )
identifier[cleanup] ( identifier[file_id] )
keyword[if] identifier[len] ( identifier[blast_qresult] . identifier[hits] )== literal[int] :
keyword[return] literal[string]
identifier[loci] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ):
keyword[if] identifier[kir] :
identifier[loci] . identifier[append] ( identifier[blast_qresult] [ identifier[i] ]. identifier[id] . identifier[split] ( literal[string] )[ literal[int] ])
keyword[else] :
identifier[loci] . identifier[append] ( identifier[blast_qresult] [ identifier[i] ]. identifier[id] . identifier[split] ( literal[string] )[ literal[int] ])
identifier[locus] = identifier[set] ( identifier[loci] )
keyword[if] identifier[len] ( identifier[locus] )== literal[int] :
keyword[if] identifier[has_hla] ( identifier[loci] [ literal[int] ]) keyword[or] identifier[kir] :
keyword[return] identifier[loci] [ literal[int] ]
keyword[else] :
keyword[return] literal[string] + identifier[loci] [ literal[int] ]
keyword[else] :
keyword[return] literal[string]
|
def get_locus(sequences, kir=False, verbose=False, refdata=None, evalue=10):
"""
Gets the locus of the sequence by running blastn
:param sequences: sequenences to blast
:param kir: bool whether the sequences are KIR or not
:rtype: ``str``
Example usage:
>>> from Bio.Seq import Seq
>>> from seqann.blast_cmd import get_locus
>>> sequence = Seq('AGAGACTCTCCCGAGGATTTCGTGTACCAGTTTAAGGCCATGTGCTACTTCACC')
>>> locus = get_locus(sequence)
"""
if not refdata:
refdata = ReferenceData() # depends on [control=['if'], data=[]]
file_id = str(randomid())
input_fasta = file_id + '.fasta'
output_xml = file_id + '.xml'
SeqIO.write(sequences, input_fasta, 'fasta')
blastn_cline = NcbiblastnCommandline(query=input_fasta, db=refdata.blastdb, evalue=evalue, outfmt=5, reward=1, penalty=-3, gapopen=5, gapextend=2, dust='yes', out=output_xml)
(stdout, stderr) = blastn_cline()
blast_qresult = SearchIO.read(output_xml, 'blast-xml')
# Delete files
cleanup(file_id)
if len(blast_qresult.hits) == 0:
return '' # depends on [control=['if'], data=[]]
loci = []
for i in range(0, 3):
if kir:
loci.append(blast_qresult[i].id.split('*')[0]) # depends on [control=['if'], data=[]]
else:
loci.append(blast_qresult[i].id.split('*')[0]) # depends on [control=['for'], data=['i']]
locus = set(loci)
if len(locus) == 1:
if has_hla(loci[0]) or kir:
return loci[0] # depends on [control=['if'], data=[]]
else:
return 'HLA-' + loci[0] # depends on [control=['if'], data=[]]
else:
return ''
|
def group_bar(self, column_label, **vargs):
"""Plot a bar chart for the table.
The values of the specified column are grouped and counted, and one
bar is produced for each group.
Note: This differs from ``bar`` in that there is no need to specify
bar heights; the height of a category's bar is the number of copies
of that category in the given column. This method behaves more like
``hist`` in that regard, while ``bar`` behaves more like ``plot`` or
``scatter`` (which require the height of each point to be specified).
Args:
``column_label`` (str or int): The name or index of a column
Kwargs:
overlay (bool): create a chart with one color per data column;
if False, each will be displayed separately.
width (float): The width of the plot, in inches
height (float): The height of the plot, in inches
vargs: Additional arguments that get passed into `plt.bar`.
See http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.bar
for additional arguments that can be passed into vargs.
"""
self.group(column_label).bar(column_label, **vargs)
|
def function[group_bar, parameter[self, column_label]]:
constant[Plot a bar chart for the table.
The values of the specified column are grouped and counted, and one
bar is produced for each group.
Note: This differs from ``bar`` in that there is no need to specify
bar heights; the height of a category's bar is the number of copies
of that category in the given column. This method behaves more like
``hist`` in that regard, while ``bar`` behaves more like ``plot`` or
``scatter`` (which require the height of each point to be specified).
Args:
``column_label`` (str or int): The name or index of a column
Kwargs:
overlay (bool): create a chart with one color per data column;
if False, each will be displayed separately.
width (float): The width of the plot, in inches
height (float): The height of the plot, in inches
vargs: Additional arguments that get passed into `plt.bar`.
See http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.bar
for additional arguments that can be passed into vargs.
]
call[call[name[self].group, parameter[name[column_label]]].bar, parameter[name[column_label]]]
|
keyword[def] identifier[group_bar] ( identifier[self] , identifier[column_label] ,** identifier[vargs] ):
literal[string]
identifier[self] . identifier[group] ( identifier[column_label] ). identifier[bar] ( identifier[column_label] ,** identifier[vargs] )
|
def group_bar(self, column_label, **vargs):
"""Plot a bar chart for the table.
The values of the specified column are grouped and counted, and one
bar is produced for each group.
Note: This differs from ``bar`` in that there is no need to specify
bar heights; the height of a category's bar is the number of copies
of that category in the given column. This method behaves more like
``hist`` in that regard, while ``bar`` behaves more like ``plot`` or
``scatter`` (which require the height of each point to be specified).
Args:
``column_label`` (str or int): The name or index of a column
Kwargs:
overlay (bool): create a chart with one color per data column;
if False, each will be displayed separately.
width (float): The width of the plot, in inches
height (float): The height of the plot, in inches
vargs: Additional arguments that get passed into `plt.bar`.
See http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.bar
for additional arguments that can be passed into vargs.
"""
self.group(column_label).bar(column_label, **vargs)
|
def get_block_hash(self, height, id=None, endpoint=None):
"""
Get hash of a block by its height
Args:
height: (int) height of the block to lookup
id: (int, optional) id to use for response tracking
endpoint: (RPCEndpoint, optional) endpoint to specify to use
Returns:
json object of the result or the error encountered in the RPC call
"""
return self._call_endpoint(GET_BLOCK_HASH, params=[height], id=id, endpoint=endpoint)
|
def function[get_block_hash, parameter[self, height, id, endpoint]]:
constant[
Get hash of a block by its height
Args:
height: (int) height of the block to lookup
id: (int, optional) id to use for response tracking
endpoint: (RPCEndpoint, optional) endpoint to specify to use
Returns:
json object of the result or the error encountered in the RPC call
]
return[call[name[self]._call_endpoint, parameter[name[GET_BLOCK_HASH]]]]
|
keyword[def] identifier[get_block_hash] ( identifier[self] , identifier[height] , identifier[id] = keyword[None] , identifier[endpoint] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_call_endpoint] ( identifier[GET_BLOCK_HASH] , identifier[params] =[ identifier[height] ], identifier[id] = identifier[id] , identifier[endpoint] = identifier[endpoint] )
|
def get_block_hash(self, height, id=None, endpoint=None):
"""
Get hash of a block by its height
Args:
height: (int) height of the block to lookup
id: (int, optional) id to use for response tracking
endpoint: (RPCEndpoint, optional) endpoint to specify to use
Returns:
json object of the result or the error encountered in the RPC call
"""
return self._call_endpoint(GET_BLOCK_HASH, params=[height], id=id, endpoint=endpoint)
|
def process_request(self, request):
"""
Sets the current request's ``urlconf`` attribute to the urlconf
associated with the subdomain, if it is listed in
``settings.SUBDOMAIN_URLCONFS``.
"""
super(SubdomainURLRoutingMiddleware, self).process_request(request)
subdomain = getattr(request, 'subdomain', UNSET)
if subdomain is not UNSET:
urlconf = settings.SUBDOMAIN_URLCONFS.get(subdomain)
if urlconf is not None:
logger.debug("Using urlconf %s for subdomain: %s",
repr(urlconf), repr(subdomain))
request.urlconf = urlconf
|
def function[process_request, parameter[self, request]]:
constant[
Sets the current request's ``urlconf`` attribute to the urlconf
associated with the subdomain, if it is listed in
``settings.SUBDOMAIN_URLCONFS``.
]
call[call[name[super], parameter[name[SubdomainURLRoutingMiddleware], name[self]]].process_request, parameter[name[request]]]
variable[subdomain] assign[=] call[name[getattr], parameter[name[request], constant[subdomain], name[UNSET]]]
if compare[name[subdomain] is_not name[UNSET]] begin[:]
variable[urlconf] assign[=] call[name[settings].SUBDOMAIN_URLCONFS.get, parameter[name[subdomain]]]
if compare[name[urlconf] is_not constant[None]] begin[:]
call[name[logger].debug, parameter[constant[Using urlconf %s for subdomain: %s], call[name[repr], parameter[name[urlconf]]], call[name[repr], parameter[name[subdomain]]]]]
name[request].urlconf assign[=] name[urlconf]
|
keyword[def] identifier[process_request] ( identifier[self] , identifier[request] ):
literal[string]
identifier[super] ( identifier[SubdomainURLRoutingMiddleware] , identifier[self] ). identifier[process_request] ( identifier[request] )
identifier[subdomain] = identifier[getattr] ( identifier[request] , literal[string] , identifier[UNSET] )
keyword[if] identifier[subdomain] keyword[is] keyword[not] identifier[UNSET] :
identifier[urlconf] = identifier[settings] . identifier[SUBDOMAIN_URLCONFS] . identifier[get] ( identifier[subdomain] )
keyword[if] identifier[urlconf] keyword[is] keyword[not] keyword[None] :
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[repr] ( identifier[urlconf] ), identifier[repr] ( identifier[subdomain] ))
identifier[request] . identifier[urlconf] = identifier[urlconf]
|
def process_request(self, request):
"""
Sets the current request's ``urlconf`` attribute to the urlconf
associated with the subdomain, if it is listed in
``settings.SUBDOMAIN_URLCONFS``.
"""
super(SubdomainURLRoutingMiddleware, self).process_request(request)
subdomain = getattr(request, 'subdomain', UNSET)
if subdomain is not UNSET:
urlconf = settings.SUBDOMAIN_URLCONFS.get(subdomain)
if urlconf is not None:
logger.debug('Using urlconf %s for subdomain: %s', repr(urlconf), repr(subdomain))
request.urlconf = urlconf # depends on [control=['if'], data=['urlconf']] # depends on [control=['if'], data=['subdomain']]
|
def flow_pipeline(diameters, lengths, k_minors, target_headloss,
nu=con.WATER_NU, pipe_rough=mats.PVC_PIPE_ROUGH):
"""
This function takes a single pipeline with multiple sections, each potentially with different diameters,
lengths and minor loss coefficients and determines the flow rate for a given headloss.
:param diameters: list of diameters, where the i_th diameter corresponds to the i_th pipe section
:type diameters: numpy.ndarray
:param lengths: list of diameters, where the i_th diameter corresponds to the i_th pipe section
:type lengths: numpy.ndarray
:param k_minors: list of diameters, where the i_th diameter corresponds to the i_th pipe section
:type k_minors: numpy.ndarray
:param target_headloss: a single headloss describing the total headloss through the system
:type target_headloss: float
:param nu: The fluid dynamic viscosity of the fluid. Defaults to water at room temperature (1 * 10**-6 * m**2/s)
:type nu: float
:param pipe_rough: The pipe roughness. Defaults to PVC roughness.
:type pipe_rough: float
:return: the total flow through the system
:rtype: float
"""
# Ensure all the arguments except total headloss are the same length
#TODO
# Total number of pipe lengths
n = diameters.size
# Start with a flow rate guess based on the flow through a single pipe section
flow = pc.flow_pipe(diameters[0], target_headloss, lengths[0], nu, pipe_rough, k_minors[0])
err = 1.0
# Add all the pipe length headlosses together to test the error
while abs(err) > 0.01 :
headloss = sum([pc.headloss(flow, diameters[i], lengths[i], nu, pipe_rough,
k_minors[i]).to(u.m).magnitude for i in range(n)])
# Test the error. This is always less than one.
err = (target_headloss - headloss) / (target_headloss + headloss)
# Adjust the total flow in the direction of the error. If there is more headloss than target headloss,
# The flow should be reduced, and vice-versa.
flow = flow + err * flow
return flow
|
def function[flow_pipeline, parameter[diameters, lengths, k_minors, target_headloss, nu, pipe_rough]]:
constant[
This function takes a single pipeline with multiple sections, each potentially with different diameters,
lengths and minor loss coefficients and determines the flow rate for a given headloss.
:param diameters: list of diameters, where the i_th diameter corresponds to the i_th pipe section
:type diameters: numpy.ndarray
:param lengths: list of diameters, where the i_th diameter corresponds to the i_th pipe section
:type lengths: numpy.ndarray
:param k_minors: list of diameters, where the i_th diameter corresponds to the i_th pipe section
:type k_minors: numpy.ndarray
:param target_headloss: a single headloss describing the total headloss through the system
:type target_headloss: float
:param nu: The fluid dynamic viscosity of the fluid. Defaults to water at room temperature (1 * 10**-6 * m**2/s)
:type nu: float
:param pipe_rough: The pipe roughness. Defaults to PVC roughness.
:type pipe_rough: float
:return: the total flow through the system
:rtype: float
]
variable[n] assign[=] name[diameters].size
variable[flow] assign[=] call[name[pc].flow_pipe, parameter[call[name[diameters]][constant[0]], name[target_headloss], call[name[lengths]][constant[0]], name[nu], name[pipe_rough], call[name[k_minors]][constant[0]]]]
variable[err] assign[=] constant[1.0]
while compare[call[name[abs], parameter[name[err]]] greater[>] constant[0.01]] begin[:]
variable[headloss] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b06bd030>]]
variable[err] assign[=] binary_operation[binary_operation[name[target_headloss] - name[headloss]] / binary_operation[name[target_headloss] + name[headloss]]]
variable[flow] assign[=] binary_operation[name[flow] + binary_operation[name[err] * name[flow]]]
return[name[flow]]
|
keyword[def] identifier[flow_pipeline] ( identifier[diameters] , identifier[lengths] , identifier[k_minors] , identifier[target_headloss] ,
identifier[nu] = identifier[con] . identifier[WATER_NU] , identifier[pipe_rough] = identifier[mats] . identifier[PVC_PIPE_ROUGH] ):
literal[string]
identifier[n] = identifier[diameters] . identifier[size]
identifier[flow] = identifier[pc] . identifier[flow_pipe] ( identifier[diameters] [ literal[int] ], identifier[target_headloss] , identifier[lengths] [ literal[int] ], identifier[nu] , identifier[pipe_rough] , identifier[k_minors] [ literal[int] ])
identifier[err] = literal[int]
keyword[while] identifier[abs] ( identifier[err] )> literal[int] :
identifier[headloss] = identifier[sum] ([ identifier[pc] . identifier[headloss] ( identifier[flow] , identifier[diameters] [ identifier[i] ], identifier[lengths] [ identifier[i] ], identifier[nu] , identifier[pipe_rough] ,
identifier[k_minors] [ identifier[i] ]). identifier[to] ( identifier[u] . identifier[m] ). identifier[magnitude] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] )])
identifier[err] =( identifier[target_headloss] - identifier[headloss] )/( identifier[target_headloss] + identifier[headloss] )
identifier[flow] = identifier[flow] + identifier[err] * identifier[flow]
keyword[return] identifier[flow]
|
def flow_pipeline(diameters, lengths, k_minors, target_headloss, nu=con.WATER_NU, pipe_rough=mats.PVC_PIPE_ROUGH):
"""
This function takes a single pipeline with multiple sections, each potentially with different diameters,
lengths and minor loss coefficients and determines the flow rate for a given headloss.
:param diameters: list of diameters, where the i_th diameter corresponds to the i_th pipe section
:type diameters: numpy.ndarray
:param lengths: list of diameters, where the i_th diameter corresponds to the i_th pipe section
:type lengths: numpy.ndarray
:param k_minors: list of diameters, where the i_th diameter corresponds to the i_th pipe section
:type k_minors: numpy.ndarray
:param target_headloss: a single headloss describing the total headloss through the system
:type target_headloss: float
:param nu: The fluid dynamic viscosity of the fluid. Defaults to water at room temperature (1 * 10**-6 * m**2/s)
:type nu: float
:param pipe_rough: The pipe roughness. Defaults to PVC roughness.
:type pipe_rough: float
:return: the total flow through the system
:rtype: float
"""
# Ensure all the arguments except total headloss are the same length
#TODO
# Total number of pipe lengths
n = diameters.size
# Start with a flow rate guess based on the flow through a single pipe section
flow = pc.flow_pipe(diameters[0], target_headloss, lengths[0], nu, pipe_rough, k_minors[0])
err = 1.0
# Add all the pipe length headlosses together to test the error
while abs(err) > 0.01:
headloss = sum([pc.headloss(flow, diameters[i], lengths[i], nu, pipe_rough, k_minors[i]).to(u.m).magnitude for i in range(n)])
# Test the error. This is always less than one.
err = (target_headloss - headloss) / (target_headloss + headloss)
# Adjust the total flow in the direction of the error. If there is more headloss than target headloss,
# The flow should be reduced, and vice-versa.
flow = flow + err * flow # depends on [control=['while'], data=[]]
return flow
|
def getInterpretation(self):
"""
Get the value of the previously POSTed Tropo action.
"""
actions = self._actions
if (type (actions) is list):
dict = actions[0]
else:
dict = actions
return dict['interpretation']
|
def function[getInterpretation, parameter[self]]:
constant[
Get the value of the previously POSTed Tropo action.
]
variable[actions] assign[=] name[self]._actions
if compare[call[name[type], parameter[name[actions]]] is name[list]] begin[:]
variable[dict] assign[=] call[name[actions]][constant[0]]
return[call[name[dict]][constant[interpretation]]]
|
keyword[def] identifier[getInterpretation] ( identifier[self] ):
literal[string]
identifier[actions] = identifier[self] . identifier[_actions]
keyword[if] ( identifier[type] ( identifier[actions] ) keyword[is] identifier[list] ):
identifier[dict] = identifier[actions] [ literal[int] ]
keyword[else] :
identifier[dict] = identifier[actions]
keyword[return] identifier[dict] [ literal[string] ]
|
def getInterpretation(self):
"""
Get the value of the previously POSTed Tropo action.
"""
actions = self._actions
if type(actions) is list:
dict = actions[0] # depends on [control=['if'], data=[]]
else:
dict = actions
return dict['interpretation']
|
def load_python_file(moduleobject):
""" Try to create an indexable instance from a module"""
if isinstance(moduleobject, str):
moduleobject = load_module(moduleobject)
if not hasattr(moduleobject, "iclass"):
raise KeyError("Element" + str(moduleobject))
iclass = getattr(moduleobject, "iclass")
mylist = getattr(moduleobject, "__all__", None) or list(filter(lambda x:x[:1] != "_", (dir(moduleobject))))
mylist.remove('iclass')
resultdic = {}
for x in mylist:
resultdic[x] = getattr(moduleobject, x)
if iclass == "SymbolGrammar":
from pydsl.grammar.BNF import BNFGrammar
return BNFGrammar(**resultdic)
elif iclass == "PLY":
from pydsl.grammar.definition import PLYGrammar
return PLYGrammar(moduleobject)
elif iclass in ["PythonGrammar"]:
from pydsl.grammar.definition import PythonGrammar
return PythonGrammar(resultdic)
elif iclass == "PythonTranslator":
return resultdic
elif iclass == "parsley":
from pydsl.grammar.parsley import ParsleyGrammar
return ParsleyGrammar(**resultdic)
elif iclass == "pyparsing":
return resultdic['root_symbol']
else:
raise ValueError(str(moduleobject))
|
def function[load_python_file, parameter[moduleobject]]:
constant[ Try to create an indexable instance from a module]
if call[name[isinstance], parameter[name[moduleobject], name[str]]] begin[:]
variable[moduleobject] assign[=] call[name[load_module], parameter[name[moduleobject]]]
if <ast.UnaryOp object at 0x7da18fe91150> begin[:]
<ast.Raise object at 0x7da18fe90eb0>
variable[iclass] assign[=] call[name[getattr], parameter[name[moduleobject], constant[iclass]]]
variable[mylist] assign[=] <ast.BoolOp object at 0x7da18fe92230>
call[name[mylist].remove, parameter[constant[iclass]]]
variable[resultdic] assign[=] dictionary[[], []]
for taget[name[x]] in starred[name[mylist]] begin[:]
call[name[resultdic]][name[x]] assign[=] call[name[getattr], parameter[name[moduleobject], name[x]]]
if compare[name[iclass] equal[==] constant[SymbolGrammar]] begin[:]
from relative_module[pydsl.grammar.BNF] import module[BNFGrammar]
return[call[name[BNFGrammar], parameter[]]]
|
keyword[def] identifier[load_python_file] ( identifier[moduleobject] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[moduleobject] , identifier[str] ):
identifier[moduleobject] = identifier[load_module] ( identifier[moduleobject] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[moduleobject] , literal[string] ):
keyword[raise] identifier[KeyError] ( literal[string] + identifier[str] ( identifier[moduleobject] ))
identifier[iclass] = identifier[getattr] ( identifier[moduleobject] , literal[string] )
identifier[mylist] = identifier[getattr] ( identifier[moduleobject] , literal[string] , keyword[None] ) keyword[or] identifier[list] ( identifier[filter] ( keyword[lambda] identifier[x] : identifier[x] [: literal[int] ]!= literal[string] ,( identifier[dir] ( identifier[moduleobject] ))))
identifier[mylist] . identifier[remove] ( literal[string] )
identifier[resultdic] ={}
keyword[for] identifier[x] keyword[in] identifier[mylist] :
identifier[resultdic] [ identifier[x] ]= identifier[getattr] ( identifier[moduleobject] , identifier[x] )
keyword[if] identifier[iclass] == literal[string] :
keyword[from] identifier[pydsl] . identifier[grammar] . identifier[BNF] keyword[import] identifier[BNFGrammar]
keyword[return] identifier[BNFGrammar] (** identifier[resultdic] )
keyword[elif] identifier[iclass] == literal[string] :
keyword[from] identifier[pydsl] . identifier[grammar] . identifier[definition] keyword[import] identifier[PLYGrammar]
keyword[return] identifier[PLYGrammar] ( identifier[moduleobject] )
keyword[elif] identifier[iclass] keyword[in] [ literal[string] ]:
keyword[from] identifier[pydsl] . identifier[grammar] . identifier[definition] keyword[import] identifier[PythonGrammar]
keyword[return] identifier[PythonGrammar] ( identifier[resultdic] )
keyword[elif] identifier[iclass] == literal[string] :
keyword[return] identifier[resultdic]
keyword[elif] identifier[iclass] == literal[string] :
keyword[from] identifier[pydsl] . identifier[grammar] . identifier[parsley] keyword[import] identifier[ParsleyGrammar]
keyword[return] identifier[ParsleyGrammar] (** identifier[resultdic] )
keyword[elif] identifier[iclass] == literal[string] :
keyword[return] identifier[resultdic] [ literal[string] ]
keyword[else] :
keyword[raise] identifier[ValueError] ( identifier[str] ( identifier[moduleobject] ))
|
def load_python_file(moduleobject):
""" Try to create an indexable instance from a module"""
if isinstance(moduleobject, str):
moduleobject = load_module(moduleobject) # depends on [control=['if'], data=[]]
if not hasattr(moduleobject, 'iclass'):
raise KeyError('Element' + str(moduleobject)) # depends on [control=['if'], data=[]]
iclass = getattr(moduleobject, 'iclass')
mylist = getattr(moduleobject, '__all__', None) or list(filter(lambda x: x[:1] != '_', dir(moduleobject)))
mylist.remove('iclass')
resultdic = {}
for x in mylist:
resultdic[x] = getattr(moduleobject, x) # depends on [control=['for'], data=['x']]
if iclass == 'SymbolGrammar':
from pydsl.grammar.BNF import BNFGrammar
return BNFGrammar(**resultdic) # depends on [control=['if'], data=[]]
elif iclass == 'PLY':
from pydsl.grammar.definition import PLYGrammar
return PLYGrammar(moduleobject) # depends on [control=['if'], data=[]]
elif iclass in ['PythonGrammar']:
from pydsl.grammar.definition import PythonGrammar
return PythonGrammar(resultdic) # depends on [control=['if'], data=[]]
elif iclass == 'PythonTranslator':
return resultdic # depends on [control=['if'], data=[]]
elif iclass == 'parsley':
from pydsl.grammar.parsley import ParsleyGrammar
return ParsleyGrammar(**resultdic) # depends on [control=['if'], data=[]]
elif iclass == 'pyparsing':
return resultdic['root_symbol'] # depends on [control=['if'], data=[]]
else:
raise ValueError(str(moduleobject))
|
def dict_stack(dict_list, key_prefix=''):
r"""
stacks values from two dicts into a new dict where the values are list of
the input values. the keys are the same.
DEPRICATE in favor of dict_stack2
Args:
dict_list (list): list of dicts with similar keys
Returns:
dict dict_stacked
CommandLine:
python -m utool.util_dict --test-dict_stack
python -m utool.util_dict --test-dict_stack:1
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> dict1_ = {'a': 1, 'b': 2}
>>> dict2_ = {'a': 2, 'b': 3, 'c': 4}
>>> dict_stacked = dict_stack([dict1_, dict2_])
>>> result = ut.repr2(dict_stacked, sorted_=True)
>>> print(result)
{'a': [1, 2], 'b': [2, 3], 'c': [4]}
Example1:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> # Get equivalent behavior with dict_stack2?
>>> # Almost, as long as None is not part of the list
>>> dict1_ = {'a': 1, 'b': 2}
>>> dict2_ = {'a': 2, 'b': 3, 'c': 4}
>>> dict_stacked_ = dict_stack2([dict1_, dict2_])
>>> dict_stacked = {key: ut.filter_Nones(val) for key, val in dict_stacked_.items()}
>>> result = ut.repr2(dict_stacked, sorted_=True)
>>> print(result)
{'a': [1, 2], 'b': [2, 3], 'c': [4]}
"""
dict_stacked_ = defaultdict(list)
for dict_ in dict_list:
for key, val in six.iteritems(dict_):
dict_stacked_[key_prefix + key].append(val)
dict_stacked = dict(dict_stacked_)
return dict_stacked
|
def function[dict_stack, parameter[dict_list, key_prefix]]:
constant[
stacks values from two dicts into a new dict where the values are list of
the input values. the keys are the same.
DEPRICATE in favor of dict_stack2
Args:
dict_list (list): list of dicts with similar keys
Returns:
dict dict_stacked
CommandLine:
python -m utool.util_dict --test-dict_stack
python -m utool.util_dict --test-dict_stack:1
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> dict1_ = {'a': 1, 'b': 2}
>>> dict2_ = {'a': 2, 'b': 3, 'c': 4}
>>> dict_stacked = dict_stack([dict1_, dict2_])
>>> result = ut.repr2(dict_stacked, sorted_=True)
>>> print(result)
{'a': [1, 2], 'b': [2, 3], 'c': [4]}
Example1:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> # Get equivalent behavior with dict_stack2?
>>> # Almost, as long as None is not part of the list
>>> dict1_ = {'a': 1, 'b': 2}
>>> dict2_ = {'a': 2, 'b': 3, 'c': 4}
>>> dict_stacked_ = dict_stack2([dict1_, dict2_])
>>> dict_stacked = {key: ut.filter_Nones(val) for key, val in dict_stacked_.items()}
>>> result = ut.repr2(dict_stacked, sorted_=True)
>>> print(result)
{'a': [1, 2], 'b': [2, 3], 'c': [4]}
]
variable[dict_stacked_] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[dict_]] in starred[name[dict_list]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b24b7fd0>, <ast.Name object at 0x7da1b24b5360>]]] in starred[call[name[six].iteritems, parameter[name[dict_]]]] begin[:]
call[call[name[dict_stacked_]][binary_operation[name[key_prefix] + name[key]]].append, parameter[name[val]]]
variable[dict_stacked] assign[=] call[name[dict], parameter[name[dict_stacked_]]]
return[name[dict_stacked]]
|
keyword[def] identifier[dict_stack] ( identifier[dict_list] , identifier[key_prefix] = literal[string] ):
literal[string]
identifier[dict_stacked_] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[dict_] keyword[in] identifier[dict_list] :
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[six] . identifier[iteritems] ( identifier[dict_] ):
identifier[dict_stacked_] [ identifier[key_prefix] + identifier[key] ]. identifier[append] ( identifier[val] )
identifier[dict_stacked] = identifier[dict] ( identifier[dict_stacked_] )
keyword[return] identifier[dict_stacked]
|
def dict_stack(dict_list, key_prefix=''):
"""
stacks values from two dicts into a new dict where the values are list of
the input values. the keys are the same.
DEPRICATE in favor of dict_stack2
Args:
dict_list (list): list of dicts with similar keys
Returns:
dict dict_stacked
CommandLine:
python -m utool.util_dict --test-dict_stack
python -m utool.util_dict --test-dict_stack:1
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> dict1_ = {'a': 1, 'b': 2}
>>> dict2_ = {'a': 2, 'b': 3, 'c': 4}
>>> dict_stacked = dict_stack([dict1_, dict2_])
>>> result = ut.repr2(dict_stacked, sorted_=True)
>>> print(result)
{'a': [1, 2], 'b': [2, 3], 'c': [4]}
Example1:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> # Get equivalent behavior with dict_stack2?
>>> # Almost, as long as None is not part of the list
>>> dict1_ = {'a': 1, 'b': 2}
>>> dict2_ = {'a': 2, 'b': 3, 'c': 4}
>>> dict_stacked_ = dict_stack2([dict1_, dict2_])
>>> dict_stacked = {key: ut.filter_Nones(val) for key, val in dict_stacked_.items()}
>>> result = ut.repr2(dict_stacked, sorted_=True)
>>> print(result)
{'a': [1, 2], 'b': [2, 3], 'c': [4]}
"""
dict_stacked_ = defaultdict(list)
for dict_ in dict_list:
for (key, val) in six.iteritems(dict_):
dict_stacked_[key_prefix + key].append(val) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['dict_']]
dict_stacked = dict(dict_stacked_)
return dict_stacked
|
def clean_server_response(self, resp_dict):
"""cleans the server reponse by replacing:
'-' -> None
'1,000' -> 1000
:param resp_dict:
:return: dict with all above substitution
"""
# change all the keys from unicode to string
d = {}
for key, value in resp_dict.items():
d[str(key)] = value
resp_dict = d
for key, value in resp_dict.items():
if type(value) is str or isinstance(value, six.string_types):
if re.match('-', value):
try:
if float(value) or int(value):
dataType = True
except ValueError:
resp_dict[key] = None
elif re.search(r'^[0-9,.]+$', value):
# replace , to '', and type cast to int
resp_dict[key] = float(re.sub(',', '', value))
else:
resp_dict[key] = str(value)
return resp_dict
|
def function[clean_server_response, parameter[self, resp_dict]]:
constant[cleans the server reponse by replacing:
'-' -> None
'1,000' -> 1000
:param resp_dict:
:return: dict with all above substitution
]
variable[d] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1ddb040>, <ast.Name object at 0x7da1b1ddb850>]]] in starred[call[name[resp_dict].items, parameter[]]] begin[:]
call[name[d]][call[name[str], parameter[name[key]]]] assign[=] name[value]
variable[resp_dict] assign[=] name[d]
for taget[tuple[[<ast.Name object at 0x7da1b1dd9d80>, <ast.Name object at 0x7da1b1dd9ff0>]]] in starred[call[name[resp_dict].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b1ddb940> begin[:]
if call[name[re].match, parameter[constant[-], name[value]]] begin[:]
<ast.Try object at 0x7da1b1ddbc40>
return[name[resp_dict]]
|
keyword[def] identifier[clean_server_response] ( identifier[self] , identifier[resp_dict] ):
literal[string]
identifier[d] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[resp_dict] . identifier[items] ():
identifier[d] [ identifier[str] ( identifier[key] )]= identifier[value]
identifier[resp_dict] = identifier[d]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[resp_dict] . identifier[items] ():
keyword[if] identifier[type] ( identifier[value] ) keyword[is] identifier[str] keyword[or] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[string_types] ):
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[value] ):
keyword[try] :
keyword[if] identifier[float] ( identifier[value] ) keyword[or] identifier[int] ( identifier[value] ):
identifier[dataType] = keyword[True]
keyword[except] identifier[ValueError] :
identifier[resp_dict] [ identifier[key] ]= keyword[None]
keyword[elif] identifier[re] . identifier[search] ( literal[string] , identifier[value] ):
identifier[resp_dict] [ identifier[key] ]= identifier[float] ( identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[value] ))
keyword[else] :
identifier[resp_dict] [ identifier[key] ]= identifier[str] ( identifier[value] )
keyword[return] identifier[resp_dict]
|
def clean_server_response(self, resp_dict):
"""cleans the server reponse by replacing:
'-' -> None
'1,000' -> 1000
:param resp_dict:
:return: dict with all above substitution
"""
# change all the keys from unicode to string
d = {}
for (key, value) in resp_dict.items():
d[str(key)] = value # depends on [control=['for'], data=[]]
resp_dict = d
for (key, value) in resp_dict.items():
if type(value) is str or isinstance(value, six.string_types):
if re.match('-', value):
try:
if float(value) or int(value):
dataType = True # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ValueError:
resp_dict[key] = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif re.search('^[0-9,.]+$', value):
# replace , to '', and type cast to int
resp_dict[key] = float(re.sub(',', '', value)) # depends on [control=['if'], data=[]]
else:
resp_dict[key] = str(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return resp_dict
|
def read_pickle(fn):
"""Load a GOParser object from a pickle file.
The function automatically detects whether the file is compressed
with gzip.
Parameters
----------
fn: str
Path of the pickle file.
Returns
-------
`GOParser`
The GOParser object stored in the pickle file.
"""
with misc.open_plain_or_gzip(fn, 'rb') as fh:
parser = pickle.load(fh)
return parser
|
def function[read_pickle, parameter[fn]]:
constant[Load a GOParser object from a pickle file.
The function automatically detects whether the file is compressed
with gzip.
Parameters
----------
fn: str
Path of the pickle file.
Returns
-------
`GOParser`
The GOParser object stored in the pickle file.
]
with call[name[misc].open_plain_or_gzip, parameter[name[fn], constant[rb]]] begin[:]
variable[parser] assign[=] call[name[pickle].load, parameter[name[fh]]]
return[name[parser]]
|
keyword[def] identifier[read_pickle] ( identifier[fn] ):
literal[string]
keyword[with] identifier[misc] . identifier[open_plain_or_gzip] ( identifier[fn] , literal[string] ) keyword[as] identifier[fh] :
identifier[parser] = identifier[pickle] . identifier[load] ( identifier[fh] )
keyword[return] identifier[parser]
|
def read_pickle(fn):
"""Load a GOParser object from a pickle file.
The function automatically detects whether the file is compressed
with gzip.
Parameters
----------
fn: str
Path of the pickle file.
Returns
-------
`GOParser`
The GOParser object stored in the pickle file.
"""
with misc.open_plain_or_gzip(fn, 'rb') as fh:
parser = pickle.load(fh) # depends on [control=['with'], data=['fh']]
return parser
|
def read_until_close(self) -> Awaitable[bytes]:
"""Asynchronously reads all data from the socket until it is closed.
This will buffer all available data until ``max_buffer_size``
is reached. If flow control or cancellation are desired, use a
loop with `read_bytes(partial=True) <.read_bytes>` instead.
.. versionchanged:: 4.0
The callback argument is now optional and a `.Future` will
be returned if it is omitted.
.. versionchanged:: 6.0
The ``callback`` and ``streaming_callback`` arguments have
been removed. Use the returned `.Future` (and `read_bytes`
with ``partial=True`` for ``streaming_callback``) instead.
"""
future = self._start_read()
if self.closed():
self._finish_read(self._read_buffer_size, False)
return future
self._read_until_close = True
try:
self._try_inline_read()
except:
future.add_done_callback(lambda f: f.exception())
raise
return future
|
def function[read_until_close, parameter[self]]:
constant[Asynchronously reads all data from the socket until it is closed.
This will buffer all available data until ``max_buffer_size``
is reached. If flow control or cancellation are desired, use a
loop with `read_bytes(partial=True) <.read_bytes>` instead.
.. versionchanged:: 4.0
The callback argument is now optional and a `.Future` will
be returned if it is omitted.
.. versionchanged:: 6.0
The ``callback`` and ``streaming_callback`` arguments have
been removed. Use the returned `.Future` (and `read_bytes`
with ``partial=True`` for ``streaming_callback``) instead.
]
variable[future] assign[=] call[name[self]._start_read, parameter[]]
if call[name[self].closed, parameter[]] begin[:]
call[name[self]._finish_read, parameter[name[self]._read_buffer_size, constant[False]]]
return[name[future]]
name[self]._read_until_close assign[=] constant[True]
<ast.Try object at 0x7da1b1f18d00>
return[name[future]]
|
keyword[def] identifier[read_until_close] ( identifier[self] )-> identifier[Awaitable] [ identifier[bytes] ]:
literal[string]
identifier[future] = identifier[self] . identifier[_start_read] ()
keyword[if] identifier[self] . identifier[closed] ():
identifier[self] . identifier[_finish_read] ( identifier[self] . identifier[_read_buffer_size] , keyword[False] )
keyword[return] identifier[future]
identifier[self] . identifier[_read_until_close] = keyword[True]
keyword[try] :
identifier[self] . identifier[_try_inline_read] ()
keyword[except] :
identifier[future] . identifier[add_done_callback] ( keyword[lambda] identifier[f] : identifier[f] . identifier[exception] ())
keyword[raise]
keyword[return] identifier[future]
|
def read_until_close(self) -> Awaitable[bytes]:
"""Asynchronously reads all data from the socket until it is closed.
This will buffer all available data until ``max_buffer_size``
is reached. If flow control or cancellation are desired, use a
loop with `read_bytes(partial=True) <.read_bytes>` instead.
.. versionchanged:: 4.0
The callback argument is now optional and a `.Future` will
be returned if it is omitted.
.. versionchanged:: 6.0
The ``callback`` and ``streaming_callback`` arguments have
been removed. Use the returned `.Future` (and `read_bytes`
with ``partial=True`` for ``streaming_callback``) instead.
"""
future = self._start_read()
if self.closed():
self._finish_read(self._read_buffer_size, False)
return future # depends on [control=['if'], data=[]]
self._read_until_close = True
try:
self._try_inline_read() # depends on [control=['try'], data=[]]
except:
future.add_done_callback(lambda f: f.exception())
raise # depends on [control=['except'], data=[]]
return future
|
def get_platforms(self, automation_api='all'):
"""Get a list of objects describing all the OS and browser platforms
currently supported on Sauce Labs."""
method = 'GET'
endpoint = '/rest/v1/info/platforms/{}'.format(automation_api)
return self.client.request(method, endpoint)
|
def function[get_platforms, parameter[self, automation_api]]:
constant[Get a list of objects describing all the OS and browser platforms
currently supported on Sauce Labs.]
variable[method] assign[=] constant[GET]
variable[endpoint] assign[=] call[constant[/rest/v1/info/platforms/{}].format, parameter[name[automation_api]]]
return[call[name[self].client.request, parameter[name[method], name[endpoint]]]]
|
keyword[def] identifier[get_platforms] ( identifier[self] , identifier[automation_api] = literal[string] ):
literal[string]
identifier[method] = literal[string]
identifier[endpoint] = literal[string] . identifier[format] ( identifier[automation_api] )
keyword[return] identifier[self] . identifier[client] . identifier[request] ( identifier[method] , identifier[endpoint] )
|
def get_platforms(self, automation_api='all'):
"""Get a list of objects describing all the OS and browser platforms
currently supported on Sauce Labs."""
method = 'GET'
endpoint = '/rest/v1/info/platforms/{}'.format(automation_api)
return self.client.request(method, endpoint)
|
def pipeline(self):
"""Returns :class:`Pipeline` object to execute bulk of commands.
It is provided for convenience.
Commands can be pipelined without it.
Example:
>>> pipe = redis.pipeline()
>>> fut1 = pipe.incr('foo') # NO `await` as it will block forever!
>>> fut2 = pipe.incr('bar')
>>> result = await pipe.execute()
>>> result
[1, 1]
>>> await asyncio.gather(fut1, fut2)
[1, 1]
>>> #
>>> # The same can be done without pipeline:
>>> #
>>> fut1 = redis.incr('foo') # the 'INCRY foo' command already sent
>>> fut2 = redis.incr('bar')
>>> await asyncio.gather(fut1, fut2)
[2, 2]
"""
return Pipeline(self._pool_or_conn, self.__class__,
loop=self._pool_or_conn._loop)
|
def function[pipeline, parameter[self]]:
constant[Returns :class:`Pipeline` object to execute bulk of commands.
It is provided for convenience.
Commands can be pipelined without it.
Example:
>>> pipe = redis.pipeline()
>>> fut1 = pipe.incr('foo') # NO `await` as it will block forever!
>>> fut2 = pipe.incr('bar')
>>> result = await pipe.execute()
>>> result
[1, 1]
>>> await asyncio.gather(fut1, fut2)
[1, 1]
>>> #
>>> # The same can be done without pipeline:
>>> #
>>> fut1 = redis.incr('foo') # the 'INCRY foo' command already sent
>>> fut2 = redis.incr('bar')
>>> await asyncio.gather(fut1, fut2)
[2, 2]
]
return[call[name[Pipeline], parameter[name[self]._pool_or_conn, name[self].__class__]]]
|
keyword[def] identifier[pipeline] ( identifier[self] ):
literal[string]
keyword[return] identifier[Pipeline] ( identifier[self] . identifier[_pool_or_conn] , identifier[self] . identifier[__class__] ,
identifier[loop] = identifier[self] . identifier[_pool_or_conn] . identifier[_loop] )
|
def pipeline(self):
"""Returns :class:`Pipeline` object to execute bulk of commands.
It is provided for convenience.
Commands can be pipelined without it.
Example:
>>> pipe = redis.pipeline()
>>> fut1 = pipe.incr('foo') # NO `await` as it will block forever!
>>> fut2 = pipe.incr('bar')
>>> result = await pipe.execute()
>>> result
[1, 1]
>>> await asyncio.gather(fut1, fut2)
[1, 1]
>>> #
>>> # The same can be done without pipeline:
>>> #
>>> fut1 = redis.incr('foo') # the 'INCRY foo' command already sent
>>> fut2 = redis.incr('bar')
>>> await asyncio.gather(fut1, fut2)
[2, 2]
"""
return Pipeline(self._pool_or_conn, self.__class__, loop=self._pool_or_conn._loop)
|
def set_label(self, value,callb=None):
"""Convenience method to set the label of the device
This method will send a SetLabel message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param value: The new label
:type value: str
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
"""
if len(value) > 32:
value = value[:32]
mypartial=partial(self.resp_set_label,label=value)
if callb:
self.req_with_ack(SetLabel, {"label": value},lambda x,y:(mypartial(y),callb(x,y)) )
else:
self.req_with_ack(SetLabel, {"label": value},lambda x,y:mypartial(y) )
|
def function[set_label, parameter[self, value, callb]]:
constant[Convenience method to set the label of the device
This method will send a SetLabel message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param value: The new label
:type value: str
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
]
if compare[call[name[len], parameter[name[value]]] greater[>] constant[32]] begin[:]
variable[value] assign[=] call[name[value]][<ast.Slice object at 0x7da18dc059c0>]
variable[mypartial] assign[=] call[name[partial], parameter[name[self].resp_set_label]]
if name[callb] begin[:]
call[name[self].req_with_ack, parameter[name[SetLabel], dictionary[[<ast.Constant object at 0x7da18dc053c0>], [<ast.Name object at 0x7da18dc06110>]], <ast.Lambda object at 0x7da18dc07070>]]
|
keyword[def] identifier[set_label] ( identifier[self] , identifier[value] , identifier[callb] = keyword[None] ):
literal[string]
keyword[if] identifier[len] ( identifier[value] )> literal[int] :
identifier[value] = identifier[value] [: literal[int] ]
identifier[mypartial] = identifier[partial] ( identifier[self] . identifier[resp_set_label] , identifier[label] = identifier[value] )
keyword[if] identifier[callb] :
identifier[self] . identifier[req_with_ack] ( identifier[SetLabel] ,{ literal[string] : identifier[value] }, keyword[lambda] identifier[x] , identifier[y] :( identifier[mypartial] ( identifier[y] ), identifier[callb] ( identifier[x] , identifier[y] )))
keyword[else] :
identifier[self] . identifier[req_with_ack] ( identifier[SetLabel] ,{ literal[string] : identifier[value] }, keyword[lambda] identifier[x] , identifier[y] : identifier[mypartial] ( identifier[y] ))
|
def set_label(self, value, callb=None):
"""Convenience method to set the label of the device
This method will send a SetLabel message to the device, and request callb be executed
when an ACK is received. The default callback will simply cache the value.
:param value: The new label
:type value: str
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: None
:rtype: None
"""
if len(value) > 32:
value = value[:32] # depends on [control=['if'], data=[]]
mypartial = partial(self.resp_set_label, label=value)
if callb:
self.req_with_ack(SetLabel, {'label': value}, lambda x, y: (mypartial(y), callb(x, y))) # depends on [control=['if'], data=[]]
else:
self.req_with_ack(SetLabel, {'label': value}, lambda x, y: mypartial(y))
|
def sawtooth(duration: int, amp: complex, period: float = None,
phase: float = 0, name: str = None) -> SamplePulse:
"""Generates sawtooth wave `SamplePulse`.
Args:
duration: Duration of pulse. Must be greater than zero.
amp: Pulse amplitude. Wave range is [-amp, amp].
period: Pulse period, units of dt. If `None` defaults to single cycle.
phase: Pulse phase.
name: Name of pulse.
"""
if period is None:
period = duration
return _sampled_sawtooth_pulse(duration, amp, period, phase=phase, name=name)
|
def function[sawtooth, parameter[duration, amp, period, phase, name]]:
constant[Generates sawtooth wave `SamplePulse`.
Args:
duration: Duration of pulse. Must be greater than zero.
amp: Pulse amplitude. Wave range is [-amp, amp].
period: Pulse period, units of dt. If `None` defaults to single cycle.
phase: Pulse phase.
name: Name of pulse.
]
if compare[name[period] is constant[None]] begin[:]
variable[period] assign[=] name[duration]
return[call[name[_sampled_sawtooth_pulse], parameter[name[duration], name[amp], name[period]]]]
|
keyword[def] identifier[sawtooth] ( identifier[duration] : identifier[int] , identifier[amp] : identifier[complex] , identifier[period] : identifier[float] = keyword[None] ,
identifier[phase] : identifier[float] = literal[int] , identifier[name] : identifier[str] = keyword[None] )-> identifier[SamplePulse] :
literal[string]
keyword[if] identifier[period] keyword[is] keyword[None] :
identifier[period] = identifier[duration]
keyword[return] identifier[_sampled_sawtooth_pulse] ( identifier[duration] , identifier[amp] , identifier[period] , identifier[phase] = identifier[phase] , identifier[name] = identifier[name] )
|
def sawtooth(duration: int, amp: complex, period: float=None, phase: float=0, name: str=None) -> SamplePulse:
"""Generates sawtooth wave `SamplePulse`.
Args:
duration: Duration of pulse. Must be greater than zero.
amp: Pulse amplitude. Wave range is [-amp, amp].
period: Pulse period, units of dt. If `None` defaults to single cycle.
phase: Pulse phase.
name: Name of pulse.
"""
if period is None:
period = duration # depends on [control=['if'], data=['period']]
return _sampled_sawtooth_pulse(duration, amp, period, phase=phase, name=name)
|
def setup_es(hosts, port, use_ssl=False, auth=None):
"""
Setup an Elasticsearch connection
Parameters
----------
hosts: list
Hostnames / IP addresses for elasticsearch cluster
port: string
Port for elasticsearch cluster
use_ssl: boolean
Whether to use SSL for the elasticsearch connection
auth: tuple
(username, password) to use with HTTP auth
Returns
-------
es_conn: an elasticsearch_dsl Search connection object.
"""
kwargs = dict(
hosts=hosts or ['localhost'],
port=port or 9200,
use_ssl=use_ssl,
)
if auth:
kwargs.update(http_auth=auth)
CLIENT = Elasticsearch(**kwargs)
S = Search(using=CLIENT, index="geonames")
return S
|
def function[setup_es, parameter[hosts, port, use_ssl, auth]]:
constant[
Setup an Elasticsearch connection
Parameters
----------
hosts: list
Hostnames / IP addresses for elasticsearch cluster
port: string
Port for elasticsearch cluster
use_ssl: boolean
Whether to use SSL for the elasticsearch connection
auth: tuple
(username, password) to use with HTTP auth
Returns
-------
es_conn: an elasticsearch_dsl Search connection object.
]
variable[kwargs] assign[=] call[name[dict], parameter[]]
if name[auth] begin[:]
call[name[kwargs].update, parameter[]]
variable[CLIENT] assign[=] call[name[Elasticsearch], parameter[]]
variable[S] assign[=] call[name[Search], parameter[]]
return[name[S]]
|
keyword[def] identifier[setup_es] ( identifier[hosts] , identifier[port] , identifier[use_ssl] = keyword[False] , identifier[auth] = keyword[None] ):
literal[string]
identifier[kwargs] = identifier[dict] (
identifier[hosts] = identifier[hosts] keyword[or] [ literal[string] ],
identifier[port] = identifier[port] keyword[or] literal[int] ,
identifier[use_ssl] = identifier[use_ssl] ,
)
keyword[if] identifier[auth] :
identifier[kwargs] . identifier[update] ( identifier[http_auth] = identifier[auth] )
identifier[CLIENT] = identifier[Elasticsearch] (** identifier[kwargs] )
identifier[S] = identifier[Search] ( identifier[using] = identifier[CLIENT] , identifier[index] = literal[string] )
keyword[return] identifier[S]
|
def setup_es(hosts, port, use_ssl=False, auth=None):
"""
Setup an Elasticsearch connection
Parameters
----------
hosts: list
Hostnames / IP addresses for elasticsearch cluster
port: string
Port for elasticsearch cluster
use_ssl: boolean
Whether to use SSL for the elasticsearch connection
auth: tuple
(username, password) to use with HTTP auth
Returns
-------
es_conn: an elasticsearch_dsl Search connection object.
"""
kwargs = dict(hosts=hosts or ['localhost'], port=port or 9200, use_ssl=use_ssl)
if auth:
kwargs.update(http_auth=auth) # depends on [control=['if'], data=[]]
CLIENT = Elasticsearch(**kwargs)
S = Search(using=CLIENT, index='geonames')
return S
|
def save_favorite_query(arg, **_):
"""Save a new favorite query.
Returns (title, rows, headers, status)"""
usage = 'Syntax: \\fs name query.\n\n' + favoritequeries.usage
if not arg:
return [(None, None, None, usage)]
name, _, query = arg.partition(' ')
# If either name or query is missing then print the usage and complain.
if (not name) or (not query):
return [(None, None, None,
usage + 'Err: Both name and query are required.')]
favoritequeries.save(name, query)
return [(None, None, None, "Saved.")]
|
def function[save_favorite_query, parameter[arg]]:
constant[Save a new favorite query.
Returns (title, rows, headers, status)]
variable[usage] assign[=] binary_operation[constant[Syntax: \fs name query.
] + name[favoritequeries].usage]
if <ast.UnaryOp object at 0x7da2041dada0> begin[:]
return[list[[<ast.Tuple object at 0x7da2041db5b0>]]]
<ast.Tuple object at 0x7da2041db8e0> assign[=] call[name[arg].partition, parameter[constant[ ]]]
if <ast.BoolOp object at 0x7da2041d8730> begin[:]
return[list[[<ast.Tuple object at 0x7da2041dac80>]]]
call[name[favoritequeries].save, parameter[name[name], name[query]]]
return[list[[<ast.Tuple object at 0x7da2041dbd60>]]]
|
keyword[def] identifier[save_favorite_query] ( identifier[arg] ,** identifier[_] ):
literal[string]
identifier[usage] = literal[string] + identifier[favoritequeries] . identifier[usage]
keyword[if] keyword[not] identifier[arg] :
keyword[return] [( keyword[None] , keyword[None] , keyword[None] , identifier[usage] )]
identifier[name] , identifier[_] , identifier[query] = identifier[arg] . identifier[partition] ( literal[string] )
keyword[if] ( keyword[not] identifier[name] ) keyword[or] ( keyword[not] identifier[query] ):
keyword[return] [( keyword[None] , keyword[None] , keyword[None] ,
identifier[usage] + literal[string] )]
identifier[favoritequeries] . identifier[save] ( identifier[name] , identifier[query] )
keyword[return] [( keyword[None] , keyword[None] , keyword[None] , literal[string] )]
|
def save_favorite_query(arg, **_):
"""Save a new favorite query.
Returns (title, rows, headers, status)"""
usage = 'Syntax: \\fs name query.\n\n' + favoritequeries.usage
if not arg:
return [(None, None, None, usage)] # depends on [control=['if'], data=[]]
(name, _, query) = arg.partition(' ')
# If either name or query is missing then print the usage and complain.
if not name or not query:
return [(None, None, None, usage + 'Err: Both name and query are required.')] # depends on [control=['if'], data=[]]
favoritequeries.save(name, query)
return [(None, None, None, 'Saved.')]
|
def has_changed_since_last_deploy(file_path, bucket):
"""
Checks if a file has changed since the last time it was deployed.
:param file_path: Path to file which should be checked. Should be relative
from root of bucket.
:param bucket_name: Name of S3 bucket to check against.
:returns: True if the file has changed, else False.
"""
msg = "Checking if {0} has changed since last deploy.".format(file_path)
logger.debug(msg)
with open(file_path) as f:
data = f.read()
file_md5 = hashlib.md5(data.encode('utf-8')).hexdigest()
logger.debug("file_md5 is {0}".format(file_md5))
key = bucket.get_key(file_path)
# HACK: Boto's md5 property does not work when the file hasn't been
# downloaded. The etag works but will break for multi-part uploaded files.
# http://stackoverflow.com/questions/16872679/how-to-programmatically-
# get-the-md5-checksum-of-amazon-s3-file-using-boto/17607096#17607096
# Also the double quotes around it must be stripped. Sketchy...boto's fault
if key:
key_md5 = key.etag.replace('"', '').strip()
logger.debug("key_md5 is {0}".format(key_md5))
else:
logger.debug("File does not exist in bucket")
return True
if file_md5 == key_md5:
logger.debug("File has not changed.")
return False
logger.debug("File has changed.")
return True
|
def function[has_changed_since_last_deploy, parameter[file_path, bucket]]:
constant[
Checks if a file has changed since the last time it was deployed.
:param file_path: Path to file which should be checked. Should be relative
from root of bucket.
:param bucket_name: Name of S3 bucket to check against.
:returns: True if the file has changed, else False.
]
variable[msg] assign[=] call[constant[Checking if {0} has changed since last deploy.].format, parameter[name[file_path]]]
call[name[logger].debug, parameter[name[msg]]]
with call[name[open], parameter[name[file_path]]] begin[:]
variable[data] assign[=] call[name[f].read, parameter[]]
variable[file_md5] assign[=] call[call[name[hashlib].md5, parameter[call[name[data].encode, parameter[constant[utf-8]]]]].hexdigest, parameter[]]
call[name[logger].debug, parameter[call[constant[file_md5 is {0}].format, parameter[name[file_md5]]]]]
variable[key] assign[=] call[name[bucket].get_key, parameter[name[file_path]]]
if name[key] begin[:]
variable[key_md5] assign[=] call[call[name[key].etag.replace, parameter[constant["], constant[]]].strip, parameter[]]
call[name[logger].debug, parameter[call[constant[key_md5 is {0}].format, parameter[name[key_md5]]]]]
if compare[name[file_md5] equal[==] name[key_md5]] begin[:]
call[name[logger].debug, parameter[constant[File has not changed.]]]
return[constant[False]]
call[name[logger].debug, parameter[constant[File has changed.]]]
return[constant[True]]
|
keyword[def] identifier[has_changed_since_last_deploy] ( identifier[file_path] , identifier[bucket] ):
literal[string]
identifier[msg] = literal[string] . identifier[format] ( identifier[file_path] )
identifier[logger] . identifier[debug] ( identifier[msg] )
keyword[with] identifier[open] ( identifier[file_path] ) keyword[as] identifier[f] :
identifier[data] = identifier[f] . identifier[read] ()
identifier[file_md5] = identifier[hashlib] . identifier[md5] ( identifier[data] . identifier[encode] ( literal[string] )). identifier[hexdigest] ()
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[file_md5] ))
identifier[key] = identifier[bucket] . identifier[get_key] ( identifier[file_path] )
keyword[if] identifier[key] :
identifier[key_md5] = identifier[key] . identifier[etag] . identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ()
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[key_md5] ))
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] keyword[True]
keyword[if] identifier[file_md5] == identifier[key_md5] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] keyword[False]
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] keyword[True]
|
def has_changed_since_last_deploy(file_path, bucket):
"""
Checks if a file has changed since the last time it was deployed.
:param file_path: Path to file which should be checked. Should be relative
from root of bucket.
:param bucket_name: Name of S3 bucket to check against.
:returns: True if the file has changed, else False.
"""
msg = 'Checking if {0} has changed since last deploy.'.format(file_path)
logger.debug(msg)
with open(file_path) as f:
data = f.read()
file_md5 = hashlib.md5(data.encode('utf-8')).hexdigest()
logger.debug('file_md5 is {0}'.format(file_md5)) # depends on [control=['with'], data=['f']]
key = bucket.get_key(file_path)
# HACK: Boto's md5 property does not work when the file hasn't been
# downloaded. The etag works but will break for multi-part uploaded files.
# http://stackoverflow.com/questions/16872679/how-to-programmatically-
# get-the-md5-checksum-of-amazon-s3-file-using-boto/17607096#17607096
# Also the double quotes around it must be stripped. Sketchy...boto's fault
if key:
key_md5 = key.etag.replace('"', '').strip()
logger.debug('key_md5 is {0}'.format(key_md5)) # depends on [control=['if'], data=[]]
else:
logger.debug('File does not exist in bucket')
return True
if file_md5 == key_md5:
logger.debug('File has not changed.')
return False # depends on [control=['if'], data=[]]
logger.debug('File has changed.')
return True
|
def mirror(self):
"""For a composite instruction, reverse the order of sub-gates.
This is done by recursively mirroring all sub-instructions.
It does not invert any gate.
Returns:
Instruction: a fresh gate with sub-gates reversed
"""
if not self._definition:
return self.copy()
reverse_inst = self.copy(name=self.name + '_mirror')
reverse_inst.definition = []
for inst, qargs, cargs in reversed(self._definition):
reverse_inst._definition.append((inst.mirror(), qargs, cargs))
return reverse_inst
|
def function[mirror, parameter[self]]:
constant[For a composite instruction, reverse the order of sub-gates.
This is done by recursively mirroring all sub-instructions.
It does not invert any gate.
Returns:
Instruction: a fresh gate with sub-gates reversed
]
if <ast.UnaryOp object at 0x7da18f8123e0> begin[:]
return[call[name[self].copy, parameter[]]]
variable[reverse_inst] assign[=] call[name[self].copy, parameter[]]
name[reverse_inst].definition assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18f812dd0>, <ast.Name object at 0x7da18f811ff0>, <ast.Name object at 0x7da18f811e40>]]] in starred[call[name[reversed], parameter[name[self]._definition]]] begin[:]
call[name[reverse_inst]._definition.append, parameter[tuple[[<ast.Call object at 0x7da18f813a60>, <ast.Name object at 0x7da18f812e90>, <ast.Name object at 0x7da18f813ac0>]]]]
return[name[reverse_inst]]
|
keyword[def] identifier[mirror] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_definition] :
keyword[return] identifier[self] . identifier[copy] ()
identifier[reverse_inst] = identifier[self] . identifier[copy] ( identifier[name] = identifier[self] . identifier[name] + literal[string] )
identifier[reverse_inst] . identifier[definition] =[]
keyword[for] identifier[inst] , identifier[qargs] , identifier[cargs] keyword[in] identifier[reversed] ( identifier[self] . identifier[_definition] ):
identifier[reverse_inst] . identifier[_definition] . identifier[append] (( identifier[inst] . identifier[mirror] (), identifier[qargs] , identifier[cargs] ))
keyword[return] identifier[reverse_inst]
|
def mirror(self):
"""For a composite instruction, reverse the order of sub-gates.
This is done by recursively mirroring all sub-instructions.
It does not invert any gate.
Returns:
Instruction: a fresh gate with sub-gates reversed
"""
if not self._definition:
return self.copy() # depends on [control=['if'], data=[]]
reverse_inst = self.copy(name=self.name + '_mirror')
reverse_inst.definition = []
for (inst, qargs, cargs) in reversed(self._definition):
reverse_inst._definition.append((inst.mirror(), qargs, cargs)) # depends on [control=['for'], data=[]]
return reverse_inst
|
def _lease_owned(self, lease, current_uuid_path):
"""
Checks if the given lease is owned by the prefix whose uuid is in
the given path
Note:
The prefix must be also in the same path it was when it took the
lease
Args:
path (str): Path to the lease
current_uuid_path (str): Path to the uuid to check ownership of
Returns:
bool: ``True`` if the given lease in owned by the prefix,
``False`` otherwise
"""
prev_uuid_path, prev_uuid = lease.metadata
with open(current_uuid_path) as f:
current_uuid = f.read()
return \
current_uuid_path == prev_uuid_path and \
prev_uuid == current_uuid
|
def function[_lease_owned, parameter[self, lease, current_uuid_path]]:
constant[
Checks if the given lease is owned by the prefix whose uuid is in
the given path
Note:
The prefix must be also in the same path it was when it took the
lease
Args:
path (str): Path to the lease
current_uuid_path (str): Path to the uuid to check ownership of
Returns:
bool: ``True`` if the given lease in owned by the prefix,
``False`` otherwise
]
<ast.Tuple object at 0x7da1b2347760> assign[=] name[lease].metadata
with call[name[open], parameter[name[current_uuid_path]]] begin[:]
variable[current_uuid] assign[=] call[name[f].read, parameter[]]
return[<ast.BoolOp object at 0x7da18f7203d0>]
|
keyword[def] identifier[_lease_owned] ( identifier[self] , identifier[lease] , identifier[current_uuid_path] ):
literal[string]
identifier[prev_uuid_path] , identifier[prev_uuid] = identifier[lease] . identifier[metadata]
keyword[with] identifier[open] ( identifier[current_uuid_path] ) keyword[as] identifier[f] :
identifier[current_uuid] = identifier[f] . identifier[read] ()
keyword[return] identifier[current_uuid_path] == identifier[prev_uuid_path] keyword[and] identifier[prev_uuid] == identifier[current_uuid]
|
def _lease_owned(self, lease, current_uuid_path):
"""
Checks if the given lease is owned by the prefix whose uuid is in
the given path
Note:
The prefix must be also in the same path it was when it took the
lease
Args:
path (str): Path to the lease
current_uuid_path (str): Path to the uuid to check ownership of
Returns:
bool: ``True`` if the given lease in owned by the prefix,
``False`` otherwise
"""
(prev_uuid_path, prev_uuid) = lease.metadata
with open(current_uuid_path) as f:
current_uuid = f.read() # depends on [control=['with'], data=['f']]
return current_uuid_path == prev_uuid_path and prev_uuid == current_uuid
|
def GetRootFileEntry(self):
"""Retrieves the root file entry.
Returns:
DataRangeFileEntry: a file entry or None if not available.
"""
path_spec = data_range_path_spec.DataRangePathSpec(
range_offset=self._range_offset,
range_size=self._range_size,
parent=self._path_spec.parent)
return self.GetFileEntryByPathSpec(path_spec)
|
def function[GetRootFileEntry, parameter[self]]:
constant[Retrieves the root file entry.
Returns:
DataRangeFileEntry: a file entry or None if not available.
]
variable[path_spec] assign[=] call[name[data_range_path_spec].DataRangePathSpec, parameter[]]
return[call[name[self].GetFileEntryByPathSpec, parameter[name[path_spec]]]]
|
keyword[def] identifier[GetRootFileEntry] ( identifier[self] ):
literal[string]
identifier[path_spec] = identifier[data_range_path_spec] . identifier[DataRangePathSpec] (
identifier[range_offset] = identifier[self] . identifier[_range_offset] ,
identifier[range_size] = identifier[self] . identifier[_range_size] ,
identifier[parent] = identifier[self] . identifier[_path_spec] . identifier[parent] )
keyword[return] identifier[self] . identifier[GetFileEntryByPathSpec] ( identifier[path_spec] )
|
def GetRootFileEntry(self):
"""Retrieves the root file entry.
Returns:
DataRangeFileEntry: a file entry or None if not available.
"""
path_spec = data_range_path_spec.DataRangePathSpec(range_offset=self._range_offset, range_size=self._range_size, parent=self._path_spec.parent)
return self.GetFileEntryByPathSpec(path_spec)
|
def filter(subtags):
"""
Get a list of non-existing string subtag(s) given the input string subtag(s).
:param subtags: string subtag or a list of string subtags.
:return: list of non-existing string subtags. The return list can be empty.
"""
if not isinstance(subtags, list):
subtags = [subtags]
return [subtag for subtag in subtags if len(tags.types(subtag)) == 0]
|
def function[filter, parameter[subtags]]:
constant[
Get a list of non-existing string subtag(s) given the input string subtag(s).
:param subtags: string subtag or a list of string subtags.
:return: list of non-existing string subtags. The return list can be empty.
]
if <ast.UnaryOp object at 0x7da1b258aa40> begin[:]
variable[subtags] assign[=] list[[<ast.Name object at 0x7da1b258a2f0>]]
return[<ast.ListComp object at 0x7da1b2589ae0>]
|
keyword[def] identifier[filter] ( identifier[subtags] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[subtags] , identifier[list] ):
identifier[subtags] =[ identifier[subtags] ]
keyword[return] [ identifier[subtag] keyword[for] identifier[subtag] keyword[in] identifier[subtags] keyword[if] identifier[len] ( identifier[tags] . identifier[types] ( identifier[subtag] ))== literal[int] ]
|
def filter(subtags):
"""
Get a list of non-existing string subtag(s) given the input string subtag(s).
:param subtags: string subtag or a list of string subtags.
:return: list of non-existing string subtags. The return list can be empty.
"""
if not isinstance(subtags, list):
subtags = [subtags] # depends on [control=['if'], data=[]]
return [subtag for subtag in subtags if len(tags.types(subtag)) == 0]
|
def _get_struct_fillstyle(self, shape_number):
"""Get the values for the FILLSTYLE record."""
obj = _make_object("FillStyle")
obj.FillStyleType = style_type = unpack_ui8(self._src)
if style_type == 0x00:
if shape_number <= 2:
obj.Color = self._get_struct_rgb()
else:
obj.Color = self._get_struct_rgba()
if style_type in (0x10, 0x12, 0x13):
obj.GradientMatrix = self._get_struct_matrix()
if style_type in (0x10, 0x12):
obj.Gradient = self._get_struct_gradient(shape_number)
if style_type == 0x13:
obj.Gradient = self._get_struct_focalgradient(shape_number)
if style_type in (0x40, 0x41, 0x42, 0x43):
obj.BitmapId = unpack_ui16(self._src)
obj.BitmapMatrix = self._get_struct_matrix()
return obj
|
def function[_get_struct_fillstyle, parameter[self, shape_number]]:
constant[Get the values for the FILLSTYLE record.]
variable[obj] assign[=] call[name[_make_object], parameter[constant[FillStyle]]]
name[obj].FillStyleType assign[=] call[name[unpack_ui8], parameter[name[self]._src]]
if compare[name[style_type] equal[==] constant[0]] begin[:]
if compare[name[shape_number] less_or_equal[<=] constant[2]] begin[:]
name[obj].Color assign[=] call[name[self]._get_struct_rgb, parameter[]]
if compare[name[style_type] in tuple[[<ast.Constant object at 0x7da18bccbe50>, <ast.Constant object at 0x7da18bccb250>, <ast.Constant object at 0x7da18bcc9ea0>]]] begin[:]
name[obj].GradientMatrix assign[=] call[name[self]._get_struct_matrix, parameter[]]
if compare[name[style_type] in tuple[[<ast.Constant object at 0x7da18bccb460>, <ast.Constant object at 0x7da18bcc8ca0>]]] begin[:]
name[obj].Gradient assign[=] call[name[self]._get_struct_gradient, parameter[name[shape_number]]]
if compare[name[style_type] equal[==] constant[19]] begin[:]
name[obj].Gradient assign[=] call[name[self]._get_struct_focalgradient, parameter[name[shape_number]]]
if compare[name[style_type] in tuple[[<ast.Constant object at 0x7da18bcc8dc0>, <ast.Constant object at 0x7da18bcc9750>, <ast.Constant object at 0x7da18bcc9a80>, <ast.Constant object at 0x7da18bcca650>]]] begin[:]
name[obj].BitmapId assign[=] call[name[unpack_ui16], parameter[name[self]._src]]
name[obj].BitmapMatrix assign[=] call[name[self]._get_struct_matrix, parameter[]]
return[name[obj]]
|
keyword[def] identifier[_get_struct_fillstyle] ( identifier[self] , identifier[shape_number] ):
literal[string]
identifier[obj] = identifier[_make_object] ( literal[string] )
identifier[obj] . identifier[FillStyleType] = identifier[style_type] = identifier[unpack_ui8] ( identifier[self] . identifier[_src] )
keyword[if] identifier[style_type] == literal[int] :
keyword[if] identifier[shape_number] <= literal[int] :
identifier[obj] . identifier[Color] = identifier[self] . identifier[_get_struct_rgb] ()
keyword[else] :
identifier[obj] . identifier[Color] = identifier[self] . identifier[_get_struct_rgba] ()
keyword[if] identifier[style_type] keyword[in] ( literal[int] , literal[int] , literal[int] ):
identifier[obj] . identifier[GradientMatrix] = identifier[self] . identifier[_get_struct_matrix] ()
keyword[if] identifier[style_type] keyword[in] ( literal[int] , literal[int] ):
identifier[obj] . identifier[Gradient] = identifier[self] . identifier[_get_struct_gradient] ( identifier[shape_number] )
keyword[if] identifier[style_type] == literal[int] :
identifier[obj] . identifier[Gradient] = identifier[self] . identifier[_get_struct_focalgradient] ( identifier[shape_number] )
keyword[if] identifier[style_type] keyword[in] ( literal[int] , literal[int] , literal[int] , literal[int] ):
identifier[obj] . identifier[BitmapId] = identifier[unpack_ui16] ( identifier[self] . identifier[_src] )
identifier[obj] . identifier[BitmapMatrix] = identifier[self] . identifier[_get_struct_matrix] ()
keyword[return] identifier[obj]
|
def _get_struct_fillstyle(self, shape_number):
"""Get the values for the FILLSTYLE record."""
obj = _make_object('FillStyle')
obj.FillStyleType = style_type = unpack_ui8(self._src)
if style_type == 0:
if shape_number <= 2:
obj.Color = self._get_struct_rgb() # depends on [control=['if'], data=[]]
else:
obj.Color = self._get_struct_rgba() # depends on [control=['if'], data=[]]
if style_type in (16, 18, 19):
obj.GradientMatrix = self._get_struct_matrix() # depends on [control=['if'], data=[]]
if style_type in (16, 18):
obj.Gradient = self._get_struct_gradient(shape_number) # depends on [control=['if'], data=[]]
if style_type == 19:
obj.Gradient = self._get_struct_focalgradient(shape_number) # depends on [control=['if'], data=[]]
if style_type in (64, 65, 66, 67):
obj.BitmapId = unpack_ui16(self._src)
obj.BitmapMatrix = self._get_struct_matrix() # depends on [control=['if'], data=[]]
return obj
|
def writeMNIST(sc, input_images, input_labels, output, format, num_partitions):
"""Writes MNIST image/label vectors into parallelized files on HDFS"""
# load MNIST gzip into memory
with open(input_images, 'rb') as f:
images = numpy.array(mnist.extract_images(f))
with open(input_labels, 'rb') as f:
if format == "csv2":
labels = numpy.array(mnist.extract_labels(f, one_hot=False))
else:
labels = numpy.array(mnist.extract_labels(f, one_hot=True))
shape = images.shape
print("images.shape: {0}".format(shape)) # 60000 x 28 x 28
print("labels.shape: {0}".format(labels.shape)) # 60000 x 10
# create RDDs of vectors
imageRDD = sc.parallelize(images.reshape(shape[0], shape[1] * shape[2]), num_partitions)
labelRDD = sc.parallelize(labels, num_partitions)
output_images = output + "/images"
output_labels = output + "/labels"
# save RDDs as specific format
if format == "pickle":
imageRDD.saveAsPickleFile(output_images)
labelRDD.saveAsPickleFile(output_labels)
elif format == "csv":
imageRDD.map(toCSV).saveAsTextFile(output_images)
labelRDD.map(toCSV).saveAsTextFile(output_labels)
elif format == "csv2":
imageRDD.map(toCSV).zip(labelRDD).map(lambda x: str(x[1]) + "|" + x[0]).saveAsTextFile(output)
else: # format == "tfr":
tfRDD = imageRDD.zip(labelRDD).map(lambda x: (bytearray(toTFExample(x[0], x[1])), None))
# requires: --jars tensorflow-hadoop-1.0-SNAPSHOT.jar
tfRDD.saveAsNewAPIHadoopFile(output, "org.tensorflow.hadoop.io.TFRecordFileOutputFormat",
keyClass="org.apache.hadoop.io.BytesWritable",
valueClass="org.apache.hadoop.io.NullWritable")
|
def function[writeMNIST, parameter[sc, input_images, input_labels, output, format, num_partitions]]:
constant[Writes MNIST image/label vectors into parallelized files on HDFS]
with call[name[open], parameter[name[input_images], constant[rb]]] begin[:]
variable[images] assign[=] call[name[numpy].array, parameter[call[name[mnist].extract_images, parameter[name[f]]]]]
with call[name[open], parameter[name[input_labels], constant[rb]]] begin[:]
if compare[name[format] equal[==] constant[csv2]] begin[:]
variable[labels] assign[=] call[name[numpy].array, parameter[call[name[mnist].extract_labels, parameter[name[f]]]]]
variable[shape] assign[=] name[images].shape
call[name[print], parameter[call[constant[images.shape: {0}].format, parameter[name[shape]]]]]
call[name[print], parameter[call[constant[labels.shape: {0}].format, parameter[name[labels].shape]]]]
variable[imageRDD] assign[=] call[name[sc].parallelize, parameter[call[name[images].reshape, parameter[call[name[shape]][constant[0]], binary_operation[call[name[shape]][constant[1]] * call[name[shape]][constant[2]]]]], name[num_partitions]]]
variable[labelRDD] assign[=] call[name[sc].parallelize, parameter[name[labels], name[num_partitions]]]
variable[output_images] assign[=] binary_operation[name[output] + constant[/images]]
variable[output_labels] assign[=] binary_operation[name[output] + constant[/labels]]
if compare[name[format] equal[==] constant[pickle]] begin[:]
call[name[imageRDD].saveAsPickleFile, parameter[name[output_images]]]
call[name[labelRDD].saveAsPickleFile, parameter[name[output_labels]]]
|
keyword[def] identifier[writeMNIST] ( identifier[sc] , identifier[input_images] , identifier[input_labels] , identifier[output] , identifier[format] , identifier[num_partitions] ):
literal[string]
keyword[with] identifier[open] ( identifier[input_images] , literal[string] ) keyword[as] identifier[f] :
identifier[images] = identifier[numpy] . identifier[array] ( identifier[mnist] . identifier[extract_images] ( identifier[f] ))
keyword[with] identifier[open] ( identifier[input_labels] , literal[string] ) keyword[as] identifier[f] :
keyword[if] identifier[format] == literal[string] :
identifier[labels] = identifier[numpy] . identifier[array] ( identifier[mnist] . identifier[extract_labels] ( identifier[f] , identifier[one_hot] = keyword[False] ))
keyword[else] :
identifier[labels] = identifier[numpy] . identifier[array] ( identifier[mnist] . identifier[extract_labels] ( identifier[f] , identifier[one_hot] = keyword[True] ))
identifier[shape] = identifier[images] . identifier[shape]
identifier[print] ( literal[string] . identifier[format] ( identifier[shape] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[labels] . identifier[shape] ))
identifier[imageRDD] = identifier[sc] . identifier[parallelize] ( identifier[images] . identifier[reshape] ( identifier[shape] [ literal[int] ], identifier[shape] [ literal[int] ]* identifier[shape] [ literal[int] ]), identifier[num_partitions] )
identifier[labelRDD] = identifier[sc] . identifier[parallelize] ( identifier[labels] , identifier[num_partitions] )
identifier[output_images] = identifier[output] + literal[string]
identifier[output_labels] = identifier[output] + literal[string]
keyword[if] identifier[format] == literal[string] :
identifier[imageRDD] . identifier[saveAsPickleFile] ( identifier[output_images] )
identifier[labelRDD] . identifier[saveAsPickleFile] ( identifier[output_labels] )
keyword[elif] identifier[format] == literal[string] :
identifier[imageRDD] . identifier[map] ( identifier[toCSV] ). identifier[saveAsTextFile] ( identifier[output_images] )
identifier[labelRDD] . identifier[map] ( identifier[toCSV] ). identifier[saveAsTextFile] ( identifier[output_labels] )
keyword[elif] identifier[format] == literal[string] :
identifier[imageRDD] . identifier[map] ( identifier[toCSV] ). identifier[zip] ( identifier[labelRDD] ). identifier[map] ( keyword[lambda] identifier[x] : identifier[str] ( identifier[x] [ literal[int] ])+ literal[string] + identifier[x] [ literal[int] ]). identifier[saveAsTextFile] ( identifier[output] )
keyword[else] :
identifier[tfRDD] = identifier[imageRDD] . identifier[zip] ( identifier[labelRDD] ). identifier[map] ( keyword[lambda] identifier[x] :( identifier[bytearray] ( identifier[toTFExample] ( identifier[x] [ literal[int] ], identifier[x] [ literal[int] ])), keyword[None] ))
identifier[tfRDD] . identifier[saveAsNewAPIHadoopFile] ( identifier[output] , literal[string] ,
identifier[keyClass] = literal[string] ,
identifier[valueClass] = literal[string] )
|
def writeMNIST(sc, input_images, input_labels, output, format, num_partitions):
"""Writes MNIST image/label vectors into parallelized files on HDFS"""
# load MNIST gzip into memory
with open(input_images, 'rb') as f:
images = numpy.array(mnist.extract_images(f)) # depends on [control=['with'], data=['f']]
with open(input_labels, 'rb') as f:
if format == 'csv2':
labels = numpy.array(mnist.extract_labels(f, one_hot=False)) # depends on [control=['if'], data=[]]
else:
labels = numpy.array(mnist.extract_labels(f, one_hot=True)) # depends on [control=['with'], data=['f']]
shape = images.shape
print('images.shape: {0}'.format(shape)) # 60000 x 28 x 28
print('labels.shape: {0}'.format(labels.shape)) # 60000 x 10
# create RDDs of vectors
imageRDD = sc.parallelize(images.reshape(shape[0], shape[1] * shape[2]), num_partitions)
labelRDD = sc.parallelize(labels, num_partitions)
output_images = output + '/images'
output_labels = output + '/labels'
# save RDDs as specific format
if format == 'pickle':
imageRDD.saveAsPickleFile(output_images)
labelRDD.saveAsPickleFile(output_labels) # depends on [control=['if'], data=[]]
elif format == 'csv':
imageRDD.map(toCSV).saveAsTextFile(output_images)
labelRDD.map(toCSV).saveAsTextFile(output_labels) # depends on [control=['if'], data=[]]
elif format == 'csv2':
imageRDD.map(toCSV).zip(labelRDD).map(lambda x: str(x[1]) + '|' + x[0]).saveAsTextFile(output) # depends on [control=['if'], data=[]]
else: # format == "tfr":
tfRDD = imageRDD.zip(labelRDD).map(lambda x: (bytearray(toTFExample(x[0], x[1])), None))
# requires: --jars tensorflow-hadoop-1.0-SNAPSHOT.jar
tfRDD.saveAsNewAPIHadoopFile(output, 'org.tensorflow.hadoop.io.TFRecordFileOutputFormat', keyClass='org.apache.hadoop.io.BytesWritable', valueClass='org.apache.hadoop.io.NullWritable')
|
def _get_amp_pha(self, data, which_data):
"""Convert input data to phase and amplitude
Parameters
----------
data: 2d ndarray (float or complex) or list
The experimental data (see `which_data`)
which_data: str
String or comma-separated list of strings indicating
the order and type of input data. Valid values are
"field", "phase", "hologram", "phase,amplitude", or
"phase,intensity", where the latter two require an
indexable object with the phase data as first element.
Returns
-------
amp, pha: tuple of (:class:`Amplitdue`, :class:`Phase`)
"""
which_data = QPImage._conv_which_data(which_data)
if which_data not in VALID_INPUT_DATA:
msg = "`which_data` must be one of {}!".format(VALID_INPUT_DATA)
raise ValueError(msg)
if which_data == "field":
amp = np.abs(data)
pha = np.angle(data)
elif which_data == "phase":
pha = data
amp = np.ones_like(data)
elif which_data == ("phase", "amplitude"):
amp = data[1]
pha = data[0]
elif which_data == ("phase", "intensity"):
amp = np.sqrt(data[1])
pha = data[0]
elif which_data == "hologram":
amp, pha = self._get_amp_pha(holo.get_field(data, **self.holo_kw),
which_data="field")
if amp.size == 0 or pha.size == 0:
msg = "`data` with shape {} has zero size!".format(amp.shape)
raise ValueError(msg)
# phase unwrapping (take into account nans)
nanmask = np.isnan(pha)
if np.sum(nanmask):
# create masked array
# skimage.restoration.unwrap_phase cannot handle nan data
# (even if masked)
pham = pha.copy()
pham[nanmask] = 0
pham = np.ma.masked_array(pham, mask=nanmask)
pha = unwrap_phase(pham, seed=47)
pha[nanmask] = np.nan
else:
pha = unwrap_phase(pha, seed=47)
return amp, pha
|
def function[_get_amp_pha, parameter[self, data, which_data]]:
constant[Convert input data to phase and amplitude
Parameters
----------
data: 2d ndarray (float or complex) or list
The experimental data (see `which_data`)
which_data: str
String or comma-separated list of strings indicating
the order and type of input data. Valid values are
"field", "phase", "hologram", "phase,amplitude", or
"phase,intensity", where the latter two require an
indexable object with the phase data as first element.
Returns
-------
amp, pha: tuple of (:class:`Amplitdue`, :class:`Phase`)
]
variable[which_data] assign[=] call[name[QPImage]._conv_which_data, parameter[name[which_data]]]
if compare[name[which_data] <ast.NotIn object at 0x7da2590d7190> name[VALID_INPUT_DATA]] begin[:]
variable[msg] assign[=] call[constant[`which_data` must be one of {}!].format, parameter[name[VALID_INPUT_DATA]]]
<ast.Raise object at 0x7da1b1038a00>
if compare[name[which_data] equal[==] constant[field]] begin[:]
variable[amp] assign[=] call[name[np].abs, parameter[name[data]]]
variable[pha] assign[=] call[name[np].angle, parameter[name[data]]]
if <ast.BoolOp object at 0x7da1b103a2c0> begin[:]
variable[msg] assign[=] call[constant[`data` with shape {} has zero size!].format, parameter[name[amp].shape]]
<ast.Raise object at 0x7da1b103a4a0>
variable[nanmask] assign[=] call[name[np].isnan, parameter[name[pha]]]
if call[name[np].sum, parameter[name[nanmask]]] begin[:]
variable[pham] assign[=] call[name[pha].copy, parameter[]]
call[name[pham]][name[nanmask]] assign[=] constant[0]
variable[pham] assign[=] call[name[np].ma.masked_array, parameter[name[pham]]]
variable[pha] assign[=] call[name[unwrap_phase], parameter[name[pham]]]
call[name[pha]][name[nanmask]] assign[=] name[np].nan
return[tuple[[<ast.Name object at 0x7da1b10447c0>, <ast.Name object at 0x7da1b1047490>]]]
|
keyword[def] identifier[_get_amp_pha] ( identifier[self] , identifier[data] , identifier[which_data] ):
literal[string]
identifier[which_data] = identifier[QPImage] . identifier[_conv_which_data] ( identifier[which_data] )
keyword[if] identifier[which_data] keyword[not] keyword[in] identifier[VALID_INPUT_DATA] :
identifier[msg] = literal[string] . identifier[format] ( identifier[VALID_INPUT_DATA] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
keyword[if] identifier[which_data] == literal[string] :
identifier[amp] = identifier[np] . identifier[abs] ( identifier[data] )
identifier[pha] = identifier[np] . identifier[angle] ( identifier[data] )
keyword[elif] identifier[which_data] == literal[string] :
identifier[pha] = identifier[data]
identifier[amp] = identifier[np] . identifier[ones_like] ( identifier[data] )
keyword[elif] identifier[which_data] ==( literal[string] , literal[string] ):
identifier[amp] = identifier[data] [ literal[int] ]
identifier[pha] = identifier[data] [ literal[int] ]
keyword[elif] identifier[which_data] ==( literal[string] , literal[string] ):
identifier[amp] = identifier[np] . identifier[sqrt] ( identifier[data] [ literal[int] ])
identifier[pha] = identifier[data] [ literal[int] ]
keyword[elif] identifier[which_data] == literal[string] :
identifier[amp] , identifier[pha] = identifier[self] . identifier[_get_amp_pha] ( identifier[holo] . identifier[get_field] ( identifier[data] ,** identifier[self] . identifier[holo_kw] ),
identifier[which_data] = literal[string] )
keyword[if] identifier[amp] . identifier[size] == literal[int] keyword[or] identifier[pha] . identifier[size] == literal[int] :
identifier[msg] = literal[string] . identifier[format] ( identifier[amp] . identifier[shape] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
identifier[nanmask] = identifier[np] . identifier[isnan] ( identifier[pha] )
keyword[if] identifier[np] . identifier[sum] ( identifier[nanmask] ):
identifier[pham] = identifier[pha] . identifier[copy] ()
identifier[pham] [ identifier[nanmask] ]= literal[int]
identifier[pham] = identifier[np] . identifier[ma] . identifier[masked_array] ( identifier[pham] , identifier[mask] = identifier[nanmask] )
identifier[pha] = identifier[unwrap_phase] ( identifier[pham] , identifier[seed] = literal[int] )
identifier[pha] [ identifier[nanmask] ]= identifier[np] . identifier[nan]
keyword[else] :
identifier[pha] = identifier[unwrap_phase] ( identifier[pha] , identifier[seed] = literal[int] )
keyword[return] identifier[amp] , identifier[pha]
|
def _get_amp_pha(self, data, which_data):
"""Convert input data to phase and amplitude
Parameters
----------
data: 2d ndarray (float or complex) or list
The experimental data (see `which_data`)
which_data: str
String or comma-separated list of strings indicating
the order and type of input data. Valid values are
"field", "phase", "hologram", "phase,amplitude", or
"phase,intensity", where the latter two require an
indexable object with the phase data as first element.
Returns
-------
amp, pha: tuple of (:class:`Amplitdue`, :class:`Phase`)
"""
which_data = QPImage._conv_which_data(which_data)
if which_data not in VALID_INPUT_DATA:
msg = '`which_data` must be one of {}!'.format(VALID_INPUT_DATA)
raise ValueError(msg) # depends on [control=['if'], data=['VALID_INPUT_DATA']]
if which_data == 'field':
amp = np.abs(data)
pha = np.angle(data) # depends on [control=['if'], data=[]]
elif which_data == 'phase':
pha = data
amp = np.ones_like(data) # depends on [control=['if'], data=[]]
elif which_data == ('phase', 'amplitude'):
amp = data[1]
pha = data[0] # depends on [control=['if'], data=[]]
elif which_data == ('phase', 'intensity'):
amp = np.sqrt(data[1])
pha = data[0] # depends on [control=['if'], data=[]]
elif which_data == 'hologram':
(amp, pha) = self._get_amp_pha(holo.get_field(data, **self.holo_kw), which_data='field') # depends on [control=['if'], data=[]]
if amp.size == 0 or pha.size == 0:
msg = '`data` with shape {} has zero size!'.format(amp.shape)
raise ValueError(msg) # depends on [control=['if'], data=[]]
# phase unwrapping (take into account nans)
nanmask = np.isnan(pha)
if np.sum(nanmask):
# create masked array
# skimage.restoration.unwrap_phase cannot handle nan data
# (even if masked)
pham = pha.copy()
pham[nanmask] = 0
pham = np.ma.masked_array(pham, mask=nanmask)
pha = unwrap_phase(pham, seed=47)
pha[nanmask] = np.nan # depends on [control=['if'], data=[]]
else:
pha = unwrap_phase(pha, seed=47)
return (amp, pha)
|
def checkTransitionType(self,state):
"""
Check whether the transition function returns output of the correct types.
This can be either a dictionary with as keys ints/tuples and values floats.
Or a tuple consisting of a 2d integer numpy array with states and a 1d numpy array with rates.
"""
test = self.transition(state)
assert isinstance(test,(dict,tuple)), "Transition function does not return a dict or tuple"
if isinstance(test,dict):
assert all(isinstance(states, (int,tuple)) for states in test.keys()), "Transition function returns a dict, but states are not represented as tuples or integers"
assert all(isinstance(rates, float) for rates in test.values()), "Transition function returns a dict, but the rates should be floats."
usesNumpy=False
if isinstance(test,tuple):
assert len(test)==2, "The transition function should return two variables: states and rates."
states,rates = test
assert isinstance(states, np.ndarray) and states.ndim==2 and issubclass(states.dtype.type, np.integer), "The states returned by the transition function need to be an integer 2d numpy array: %r" %states
assert isinstance(rates, np.ndarray) and rates.ndim==1, "The rates returned by the transition function need to be a 1d numpy array: %r" % rates
usesNumpy = True
return usesNumpy
|
def function[checkTransitionType, parameter[self, state]]:
constant[
Check whether the transition function returns output of the correct types.
This can be either a dictionary with as keys ints/tuples and values floats.
Or a tuple consisting of a 2d integer numpy array with states and a 1d numpy array with rates.
]
variable[test] assign[=] call[name[self].transition, parameter[name[state]]]
assert[call[name[isinstance], parameter[name[test], tuple[[<ast.Name object at 0x7da18fe927d0>, <ast.Name object at 0x7da18fe91540>]]]]]
if call[name[isinstance], parameter[name[test], name[dict]]] begin[:]
assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da18fe91300>]]]
assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da18fe91360>]]]
variable[usesNumpy] assign[=] constant[False]
if call[name[isinstance], parameter[name[test], name[tuple]]] begin[:]
assert[compare[call[name[len], parameter[name[test]]] equal[==] constant[2]]]
<ast.Tuple object at 0x7da18fe90a90> assign[=] name[test]
assert[<ast.BoolOp object at 0x7da18fe934f0>]
assert[<ast.BoolOp object at 0x7da204347eb0>]
variable[usesNumpy] assign[=] constant[True]
return[name[usesNumpy]]
|
keyword[def] identifier[checkTransitionType] ( identifier[self] , identifier[state] ):
literal[string]
identifier[test] = identifier[self] . identifier[transition] ( identifier[state] )
keyword[assert] identifier[isinstance] ( identifier[test] ,( identifier[dict] , identifier[tuple] )), literal[string]
keyword[if] identifier[isinstance] ( identifier[test] , identifier[dict] ):
keyword[assert] identifier[all] ( identifier[isinstance] ( identifier[states] ,( identifier[int] , identifier[tuple] )) keyword[for] identifier[states] keyword[in] identifier[test] . identifier[keys] ()), literal[string]
keyword[assert] identifier[all] ( identifier[isinstance] ( identifier[rates] , identifier[float] ) keyword[for] identifier[rates] keyword[in] identifier[test] . identifier[values] ()), literal[string]
identifier[usesNumpy] = keyword[False]
keyword[if] identifier[isinstance] ( identifier[test] , identifier[tuple] ):
keyword[assert] identifier[len] ( identifier[test] )== literal[int] , literal[string]
identifier[states] , identifier[rates] = identifier[test]
keyword[assert] identifier[isinstance] ( identifier[states] , identifier[np] . identifier[ndarray] ) keyword[and] identifier[states] . identifier[ndim] == literal[int] keyword[and] identifier[issubclass] ( identifier[states] . identifier[dtype] . identifier[type] , identifier[np] . identifier[integer] ), literal[string] % identifier[states]
keyword[assert] identifier[isinstance] ( identifier[rates] , identifier[np] . identifier[ndarray] ) keyword[and] identifier[rates] . identifier[ndim] == literal[int] , literal[string] % identifier[rates]
identifier[usesNumpy] = keyword[True]
keyword[return] identifier[usesNumpy]
|
def checkTransitionType(self, state):
"""
Check whether the transition function returns output of the correct types.
This can be either a dictionary with as keys ints/tuples and values floats.
Or a tuple consisting of a 2d integer numpy array with states and a 1d numpy array with rates.
"""
test = self.transition(state)
assert isinstance(test, (dict, tuple)), 'Transition function does not return a dict or tuple'
if isinstance(test, dict):
assert all((isinstance(states, (int, tuple)) for states in test.keys())), 'Transition function returns a dict, but states are not represented as tuples or integers'
assert all((isinstance(rates, float) for rates in test.values())), 'Transition function returns a dict, but the rates should be floats.'
usesNumpy = False # depends on [control=['if'], data=[]]
if isinstance(test, tuple):
assert len(test) == 2, 'The transition function should return two variables: states and rates.'
(states, rates) = test
assert isinstance(states, np.ndarray) and states.ndim == 2 and issubclass(states.dtype.type, np.integer), 'The states returned by the transition function need to be an integer 2d numpy array: %r' % states
assert isinstance(rates, np.ndarray) and rates.ndim == 1, 'The rates returned by the transition function need to be a 1d numpy array: %r' % rates
usesNumpy = True # depends on [control=['if'], data=[]]
return usesNumpy
|
def cfnumber_to_number(cfnumber):
"""Convert CFNumber to python int or float."""
numeric_type = cf.CFNumberGetType(cfnumber)
cfnum_to_ctype = {kCFNumberSInt8Type: c_int8, kCFNumberSInt16Type: c_int16,
kCFNumberSInt32Type: c_int32,
kCFNumberSInt64Type: c_int64,
kCFNumberFloat32Type: c_float,
kCFNumberFloat64Type: c_double,
kCFNumberCharType: c_byte, kCFNumberShortType: c_short,
kCFNumberIntType: c_int, kCFNumberLongType: c_long,
kCFNumberLongLongType: c_longlong,
kCFNumberFloatType: c_float,
kCFNumberDoubleType: c_double,
kCFNumberCFIndexType: CFIndex,
kCFNumberCGFloatType: CGFloat}
if numeric_type in cfnum_to_ctype:
t = cfnum_to_ctype[numeric_type]
result = t()
if cf.CFNumberGetValue(cfnumber, numeric_type, byref(result)):
return result.value
else:
raise Exception(
'cfnumber_to_number: unhandled CFNumber type %d' % numeric_type)
|
def function[cfnumber_to_number, parameter[cfnumber]]:
constant[Convert CFNumber to python int or float.]
variable[numeric_type] assign[=] call[name[cf].CFNumberGetType, parameter[name[cfnumber]]]
variable[cfnum_to_ctype] assign[=] dictionary[[<ast.Name object at 0x7da1b0e427d0>, <ast.Name object at 0x7da1b0e41c00>, <ast.Name object at 0x7da1b0e417b0>, <ast.Name object at 0x7da1b0e42ad0>, <ast.Name object at 0x7da1b0e41270>, <ast.Name object at 0x7da1b0e41a20>, <ast.Name object at 0x7da1b0e40910>, <ast.Name object at 0x7da1b0e43970>, <ast.Name object at 0x7da1b0e43040>, <ast.Name object at 0x7da1b0e42560>, <ast.Name object at 0x7da1b0e42bc0>, <ast.Name object at 0x7da1b0e41750>, <ast.Name object at 0x7da1b0e41c30>, <ast.Name object at 0x7da1b0e42740>, <ast.Name object at 0x7da1b0e43b50>], [<ast.Name object at 0x7da1b0e430a0>, <ast.Name object at 0x7da1b0e422f0>, <ast.Name object at 0x7da1b0e41090>, <ast.Name object at 0x7da1b0e414b0>, <ast.Name object at 0x7da1b0e42860>, <ast.Name object at 0x7da1b0e42440>, <ast.Name object at 0x7da1b0e42cb0>, <ast.Name object at 0x7da1b0e42470>, <ast.Name object at 0x7da1b0e43af0>, <ast.Name object at 0x7da1b0e43f40>, <ast.Name object at 0x7da1b0e43b80>, <ast.Name object at 0x7da1b0e41060>, <ast.Name object at 0x7da1b0e415d0>, <ast.Name object at 0x7da1b0e42da0>, <ast.Name object at 0x7da1b0e41e40>]]
if compare[name[numeric_type] in name[cfnum_to_ctype]] begin[:]
variable[t] assign[=] call[name[cfnum_to_ctype]][name[numeric_type]]
variable[result] assign[=] call[name[t], parameter[]]
if call[name[cf].CFNumberGetValue, parameter[name[cfnumber], name[numeric_type], call[name[byref], parameter[name[result]]]]] begin[:]
return[name[result].value]
|
keyword[def] identifier[cfnumber_to_number] ( identifier[cfnumber] ):
literal[string]
identifier[numeric_type] = identifier[cf] . identifier[CFNumberGetType] ( identifier[cfnumber] )
identifier[cfnum_to_ctype] ={ identifier[kCFNumberSInt8Type] : identifier[c_int8] , identifier[kCFNumberSInt16Type] : identifier[c_int16] ,
identifier[kCFNumberSInt32Type] : identifier[c_int32] ,
identifier[kCFNumberSInt64Type] : identifier[c_int64] ,
identifier[kCFNumberFloat32Type] : identifier[c_float] ,
identifier[kCFNumberFloat64Type] : identifier[c_double] ,
identifier[kCFNumberCharType] : identifier[c_byte] , identifier[kCFNumberShortType] : identifier[c_short] ,
identifier[kCFNumberIntType] : identifier[c_int] , identifier[kCFNumberLongType] : identifier[c_long] ,
identifier[kCFNumberLongLongType] : identifier[c_longlong] ,
identifier[kCFNumberFloatType] : identifier[c_float] ,
identifier[kCFNumberDoubleType] : identifier[c_double] ,
identifier[kCFNumberCFIndexType] : identifier[CFIndex] ,
identifier[kCFNumberCGFloatType] : identifier[CGFloat] }
keyword[if] identifier[numeric_type] keyword[in] identifier[cfnum_to_ctype] :
identifier[t] = identifier[cfnum_to_ctype] [ identifier[numeric_type] ]
identifier[result] = identifier[t] ()
keyword[if] identifier[cf] . identifier[CFNumberGetValue] ( identifier[cfnumber] , identifier[numeric_type] , identifier[byref] ( identifier[result] )):
keyword[return] identifier[result] . identifier[value]
keyword[else] :
keyword[raise] identifier[Exception] (
literal[string] % identifier[numeric_type] )
|
def cfnumber_to_number(cfnumber):
"""Convert CFNumber to python int or float."""
numeric_type = cf.CFNumberGetType(cfnumber)
cfnum_to_ctype = {kCFNumberSInt8Type: c_int8, kCFNumberSInt16Type: c_int16, kCFNumberSInt32Type: c_int32, kCFNumberSInt64Type: c_int64, kCFNumberFloat32Type: c_float, kCFNumberFloat64Type: c_double, kCFNumberCharType: c_byte, kCFNumberShortType: c_short, kCFNumberIntType: c_int, kCFNumberLongType: c_long, kCFNumberLongLongType: c_longlong, kCFNumberFloatType: c_float, kCFNumberDoubleType: c_double, kCFNumberCFIndexType: CFIndex, kCFNumberCGFloatType: CGFloat}
if numeric_type in cfnum_to_ctype:
t = cfnum_to_ctype[numeric_type]
result = t()
if cf.CFNumberGetValue(cfnumber, numeric_type, byref(result)):
return result.value # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['numeric_type', 'cfnum_to_ctype']]
else:
raise Exception('cfnumber_to_number: unhandled CFNumber type %d' % numeric_type)
|
def insert(self, doc_or_docs, **kwargs):
"""Insert method
"""
check = kwargs.pop('check', True)
if isinstance(doc_or_docs, dict):
if check is True:
doc_or_docs = self._valid_record(doc_or_docs)
result = self.__collect.insert_one(doc_or_docs, **kwargs)
return result.inserted_id
else:
if check is True:
for d in doc_or_docs:
d = self._valid_record(d)
result = self.__collect.insert_many(doc_or_docs, **kwargs)
return result.inserted_ids
|
def function[insert, parameter[self, doc_or_docs]]:
constant[Insert method
]
variable[check] assign[=] call[name[kwargs].pop, parameter[constant[check], constant[True]]]
if call[name[isinstance], parameter[name[doc_or_docs], name[dict]]] begin[:]
if compare[name[check] is constant[True]] begin[:]
variable[doc_or_docs] assign[=] call[name[self]._valid_record, parameter[name[doc_or_docs]]]
variable[result] assign[=] call[name[self].__collect.insert_one, parameter[name[doc_or_docs]]]
return[name[result].inserted_id]
|
keyword[def] identifier[insert] ( identifier[self] , identifier[doc_or_docs] ,** identifier[kwargs] ):
literal[string]
identifier[check] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] )
keyword[if] identifier[isinstance] ( identifier[doc_or_docs] , identifier[dict] ):
keyword[if] identifier[check] keyword[is] keyword[True] :
identifier[doc_or_docs] = identifier[self] . identifier[_valid_record] ( identifier[doc_or_docs] )
identifier[result] = identifier[self] . identifier[__collect] . identifier[insert_one] ( identifier[doc_or_docs] ,** identifier[kwargs] )
keyword[return] identifier[result] . identifier[inserted_id]
keyword[else] :
keyword[if] identifier[check] keyword[is] keyword[True] :
keyword[for] identifier[d] keyword[in] identifier[doc_or_docs] :
identifier[d] = identifier[self] . identifier[_valid_record] ( identifier[d] )
identifier[result] = identifier[self] . identifier[__collect] . identifier[insert_many] ( identifier[doc_or_docs] ,** identifier[kwargs] )
keyword[return] identifier[result] . identifier[inserted_ids]
|
def insert(self, doc_or_docs, **kwargs):
"""Insert method
"""
check = kwargs.pop('check', True)
if isinstance(doc_or_docs, dict):
if check is True:
doc_or_docs = self._valid_record(doc_or_docs) # depends on [control=['if'], data=[]]
result = self.__collect.insert_one(doc_or_docs, **kwargs)
return result.inserted_id # depends on [control=['if'], data=[]]
else:
if check is True:
for d in doc_or_docs:
d = self._valid_record(d) # depends on [control=['for'], data=['d']] # depends on [control=['if'], data=[]]
result = self.__collect.insert_many(doc_or_docs, **kwargs)
return result.inserted_ids
|
def plot_site(fignum, SiteRec, data, key):
"""
deprecated (used in ipmag)
"""
print('Site mean data: ')
print(' dec inc n_lines n_planes kappa R alpha_95 comp coord')
print(SiteRec['site_dec'], SiteRec['site_inc'], SiteRec['site_n_lines'], SiteRec['site_n_planes'], SiteRec['site_k'],
SiteRec['site_r'], SiteRec['site_alpha95'], SiteRec['site_comp_name'], SiteRec['site_tilt_correction'])
print('sample/specimen, dec, inc, n_specs/a95,| method codes ')
for i in range(len(data)):
print('%s: %s %s %s / %s | %s' % (data[i]['er_' + key + '_name'], data[i][key + '_dec'], data[i]
[key + '_inc'], data[i][key + '_n'], data[i][key + '_alpha95'], data[i]['magic_method_codes']))
plot_slnp(fignum, SiteRec, data, key)
plot = input("s[a]ve plot, [q]uit or <return> to continue: ")
if plot == 'q':
print("CUL8R")
sys.exit()
if plot == 'a':
files = {}
for key in list(EQ.keys()):
files[key] = site + '_' + key + '.' + fmt
save_plots(EQ, files)
|
def function[plot_site, parameter[fignum, SiteRec, data, key]]:
constant[
deprecated (used in ipmag)
]
call[name[print], parameter[constant[Site mean data: ]]]
call[name[print], parameter[constant[ dec inc n_lines n_planes kappa R alpha_95 comp coord]]]
call[name[print], parameter[call[name[SiteRec]][constant[site_dec]], call[name[SiteRec]][constant[site_inc]], call[name[SiteRec]][constant[site_n_lines]], call[name[SiteRec]][constant[site_n_planes]], call[name[SiteRec]][constant[site_k]], call[name[SiteRec]][constant[site_r]], call[name[SiteRec]][constant[site_alpha95]], call[name[SiteRec]][constant[site_comp_name]], call[name[SiteRec]][constant[site_tilt_correction]]]]
call[name[print], parameter[constant[sample/specimen, dec, inc, n_specs/a95,| method codes ]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[data]]]]]] begin[:]
call[name[print], parameter[binary_operation[constant[%s: %s %s %s / %s | %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b0510b80>, <ast.Subscript object at 0x7da1b0510ee0>, <ast.Subscript object at 0x7da1b0537850>, <ast.Subscript object at 0x7da1b0535b70>, <ast.Subscript object at 0x7da1b0537c70>, <ast.Subscript object at 0x7da1b0535990>]]]]]
call[name[plot_slnp], parameter[name[fignum], name[SiteRec], name[data], name[key]]]
variable[plot] assign[=] call[name[input], parameter[constant[s[a]ve plot, [q]uit or <return> to continue: ]]]
if compare[name[plot] equal[==] constant[q]] begin[:]
call[name[print], parameter[constant[CUL8R]]]
call[name[sys].exit, parameter[]]
if compare[name[plot] equal[==] constant[a]] begin[:]
variable[files] assign[=] dictionary[[], []]
for taget[name[key]] in starred[call[name[list], parameter[call[name[EQ].keys, parameter[]]]]] begin[:]
call[name[files]][name[key]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[site] + constant[_]] + name[key]] + constant[.]] + name[fmt]]
call[name[save_plots], parameter[name[EQ], name[files]]]
|
keyword[def] identifier[plot_site] ( identifier[fignum] , identifier[SiteRec] , identifier[data] , identifier[key] ):
literal[string]
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( identifier[SiteRec] [ literal[string] ], identifier[SiteRec] [ literal[string] ], identifier[SiteRec] [ literal[string] ], identifier[SiteRec] [ literal[string] ], identifier[SiteRec] [ literal[string] ],
identifier[SiteRec] [ literal[string] ], identifier[SiteRec] [ literal[string] ], identifier[SiteRec] [ literal[string] ], identifier[SiteRec] [ literal[string] ])
identifier[print] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[data] )):
identifier[print] ( literal[string] %( identifier[data] [ identifier[i] ][ literal[string] + identifier[key] + literal[string] ], identifier[data] [ identifier[i] ][ identifier[key] + literal[string] ], identifier[data] [ identifier[i] ]
[ identifier[key] + literal[string] ], identifier[data] [ identifier[i] ][ identifier[key] + literal[string] ], identifier[data] [ identifier[i] ][ identifier[key] + literal[string] ], identifier[data] [ identifier[i] ][ literal[string] ]))
identifier[plot_slnp] ( identifier[fignum] , identifier[SiteRec] , identifier[data] , identifier[key] )
identifier[plot] = identifier[input] ( literal[string] )
keyword[if] identifier[plot] == literal[string] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ()
keyword[if] identifier[plot] == literal[string] :
identifier[files] ={}
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[EQ] . identifier[keys] ()):
identifier[files] [ identifier[key] ]= identifier[site] + literal[string] + identifier[key] + literal[string] + identifier[fmt]
identifier[save_plots] ( identifier[EQ] , identifier[files] )
|
def plot_site(fignum, SiteRec, data, key):
"""
deprecated (used in ipmag)
"""
print('Site mean data: ')
print(' dec inc n_lines n_planes kappa R alpha_95 comp coord')
print(SiteRec['site_dec'], SiteRec['site_inc'], SiteRec['site_n_lines'], SiteRec['site_n_planes'], SiteRec['site_k'], SiteRec['site_r'], SiteRec['site_alpha95'], SiteRec['site_comp_name'], SiteRec['site_tilt_correction'])
print('sample/specimen, dec, inc, n_specs/a95,| method codes ')
for i in range(len(data)):
print('%s: %s %s %s / %s | %s' % (data[i]['er_' + key + '_name'], data[i][key + '_dec'], data[i][key + '_inc'], data[i][key + '_n'], data[i][key + '_alpha95'], data[i]['magic_method_codes'])) # depends on [control=['for'], data=['i']]
plot_slnp(fignum, SiteRec, data, key)
plot = input('s[a]ve plot, [q]uit or <return> to continue: ')
if plot == 'q':
print('CUL8R')
sys.exit() # depends on [control=['if'], data=[]]
if plot == 'a':
files = {}
for key in list(EQ.keys()):
files[key] = site + '_' + key + '.' + fmt # depends on [control=['for'], data=['key']]
save_plots(EQ, files) # depends on [control=['if'], data=[]]
|
def key(self, user="", areq=None):
"""
Return a key (the session id)
:param user: User id
:param areq: The authorization request
:return: An ID
"""
csum = hashlib.new('sha224')
csum.update(rndstr(32).encode('utf-8'))
return csum.hexdigest()
|
def function[key, parameter[self, user, areq]]:
constant[
Return a key (the session id)
:param user: User id
:param areq: The authorization request
:return: An ID
]
variable[csum] assign[=] call[name[hashlib].new, parameter[constant[sha224]]]
call[name[csum].update, parameter[call[call[name[rndstr], parameter[constant[32]]].encode, parameter[constant[utf-8]]]]]
return[call[name[csum].hexdigest, parameter[]]]
|
keyword[def] identifier[key] ( identifier[self] , identifier[user] = literal[string] , identifier[areq] = keyword[None] ):
literal[string]
identifier[csum] = identifier[hashlib] . identifier[new] ( literal[string] )
identifier[csum] . identifier[update] ( identifier[rndstr] ( literal[int] ). identifier[encode] ( literal[string] ))
keyword[return] identifier[csum] . identifier[hexdigest] ()
|
def key(self, user='', areq=None):
"""
Return a key (the session id)
:param user: User id
:param areq: The authorization request
:return: An ID
"""
csum = hashlib.new('sha224')
csum.update(rndstr(32).encode('utf-8'))
return csum.hexdigest()
|
def abusecheck(self, send, nick, target, limit, cmd):
""" Rate-limits commands.
| If a nick uses commands with the limit attr set, record the time
| at which they were used.
| If the command is used more than `limit` times in a
| minute, ignore the nick.
"""
if nick not in self.abuselist:
self.abuselist[nick] = {}
if cmd not in self.abuselist[nick]:
self.abuselist[nick][cmd] = [datetime.now()]
else:
self.abuselist[nick][cmd].append(datetime.now())
count = 0
for x in self.abuselist[nick][cmd]:
# 60 seconds - arbitrary cuttoff
if datetime.now() - x < timedelta(seconds=60):
count = count + 1
if count > limit:
msg = "%s: don't abuse scores!" if cmd == 'scores' else "%s: stop abusing the bot!"
send(msg % nick, target=target)
with self.db.session_scope() as session:
send(misc.ignore(session, nick))
return True
|
def function[abusecheck, parameter[self, send, nick, target, limit, cmd]]:
constant[ Rate-limits commands.
| If a nick uses commands with the limit attr set, record the time
| at which they were used.
| If the command is used more than `limit` times in a
| minute, ignore the nick.
]
if compare[name[nick] <ast.NotIn object at 0x7da2590d7190> name[self].abuselist] begin[:]
call[name[self].abuselist][name[nick]] assign[=] dictionary[[], []]
if compare[name[cmd] <ast.NotIn object at 0x7da2590d7190> call[name[self].abuselist][name[nick]]] begin[:]
call[call[name[self].abuselist][name[nick]]][name[cmd]] assign[=] list[[<ast.Call object at 0x7da1b20d5ba0>]]
variable[count] assign[=] constant[0]
for taget[name[x]] in starred[call[call[name[self].abuselist][name[nick]]][name[cmd]]] begin[:]
if compare[binary_operation[call[name[datetime].now, parameter[]] - name[x]] less[<] call[name[timedelta], parameter[]]] begin[:]
variable[count] assign[=] binary_operation[name[count] + constant[1]]
if compare[name[count] greater[>] name[limit]] begin[:]
variable[msg] assign[=] <ast.IfExp object at 0x7da1b20d64d0>
call[name[send], parameter[binary_operation[name[msg] <ast.Mod object at 0x7da2590d6920> name[nick]]]]
with call[name[self].db.session_scope, parameter[]] begin[:]
call[name[send], parameter[call[name[misc].ignore, parameter[name[session], name[nick]]]]]
return[constant[True]]
|
keyword[def] identifier[abusecheck] ( identifier[self] , identifier[send] , identifier[nick] , identifier[target] , identifier[limit] , identifier[cmd] ):
literal[string]
keyword[if] identifier[nick] keyword[not] keyword[in] identifier[self] . identifier[abuselist] :
identifier[self] . identifier[abuselist] [ identifier[nick] ]={}
keyword[if] identifier[cmd] keyword[not] keyword[in] identifier[self] . identifier[abuselist] [ identifier[nick] ]:
identifier[self] . identifier[abuselist] [ identifier[nick] ][ identifier[cmd] ]=[ identifier[datetime] . identifier[now] ()]
keyword[else] :
identifier[self] . identifier[abuselist] [ identifier[nick] ][ identifier[cmd] ]. identifier[append] ( identifier[datetime] . identifier[now] ())
identifier[count] = literal[int]
keyword[for] identifier[x] keyword[in] identifier[self] . identifier[abuselist] [ identifier[nick] ][ identifier[cmd] ]:
keyword[if] identifier[datetime] . identifier[now] ()- identifier[x] < identifier[timedelta] ( identifier[seconds] = literal[int] ):
identifier[count] = identifier[count] + literal[int]
keyword[if] identifier[count] > identifier[limit] :
identifier[msg] = literal[string] keyword[if] identifier[cmd] == literal[string] keyword[else] literal[string]
identifier[send] ( identifier[msg] % identifier[nick] , identifier[target] = identifier[target] )
keyword[with] identifier[self] . identifier[db] . identifier[session_scope] () keyword[as] identifier[session] :
identifier[send] ( identifier[misc] . identifier[ignore] ( identifier[session] , identifier[nick] ))
keyword[return] keyword[True]
|
def abusecheck(self, send, nick, target, limit, cmd):
""" Rate-limits commands.
| If a nick uses commands with the limit attr set, record the time
| at which they were used.
| If the command is used more than `limit` times in a
| minute, ignore the nick.
"""
if nick not in self.abuselist:
self.abuselist[nick] = {} # depends on [control=['if'], data=['nick']]
if cmd not in self.abuselist[nick]:
self.abuselist[nick][cmd] = [datetime.now()] # depends on [control=['if'], data=['cmd']]
else:
self.abuselist[nick][cmd].append(datetime.now())
count = 0
for x in self.abuselist[nick][cmd]:
# 60 seconds - arbitrary cuttoff
if datetime.now() - x < timedelta(seconds=60):
count = count + 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
if count > limit:
msg = "%s: don't abuse scores!" if cmd == 'scores' else '%s: stop abusing the bot!'
send(msg % nick, target=target)
with self.db.session_scope() as session:
send(misc.ignore(session, nick)) # depends on [control=['with'], data=['session']]
return True # depends on [control=['if'], data=[]]
|
def analyze(problem, X, Y, num_resamples=1000,
conf_level=0.95, print_to_console=False, seed=None):
"""Calculates Derivative-based Global Sensitivity Measure on model outputs.
Returns a dictionary with keys 'vi', 'vi_std', 'dgsm', and 'dgsm_conf',
where each entry is a list of size D (the number of parameters) containing
the indices in the same order as the parameter file.
Parameters
----------
problem : dict
The problem definition
X : numpy.matrix
The NumPy matrix containing the model inputs
Y : numpy.array
The NumPy array containing the model outputs
num_resamples : int
The number of resamples used to compute the confidence
intervals (default 1000)
conf_level : float
The confidence interval level (default 0.95)
print_to_console : bool
Print results directly to console (default False)
References
----------
.. [1] Sobol, I. M. and S. Kucherenko (2009). "Derivative based global
sensitivity measures and their link with global sensitivity
indices." Mathematics and Computers in Simulation, 79(10):3009-3017,
doi:10.1016/j.matcom.2009.01.023.
"""
if seed:
np.random.seed(seed)
D = problem['num_vars']
if Y.size % (D + 1) == 0:
N = int(Y.size / (D + 1))
else:
raise RuntimeError("Incorrect number of samples in model output file.")
if not 0 < conf_level < 1:
raise RuntimeError("Confidence level must be between 0-1.")
base = np.zeros(N)
X_base = np.zeros((N, D))
perturbed = np.zeros((N, D))
X_perturbed = np.zeros((N, D))
step = D + 1
base = Y[0:Y.size:step]
X_base = X[0:Y.size:step, :]
for j in range(D):
perturbed[:, j] = Y[(j + 1):Y.size:step]
X_perturbed[:, j] = X[(j + 1):Y.size:step, j]
# First order (+conf.) and Total order (+conf.)
keys = ('vi', 'vi_std', 'dgsm', 'dgsm_conf')
S = ResultDict((k, np.zeros(D)) for k in keys)
S['names'] = problem['names']
if print_to_console:
print("Parameter %s %s %s %s" % keys)
for j in range(D):
S['vi'][j], S['vi_std'][j] = calc_vi(
base, perturbed[:, j], X_perturbed[:, j] - X_base[:, j])
S['dgsm'][j], S['dgsm_conf'][j] = calc_dgsm(base, perturbed[:, j], X_perturbed[
:, j] - X_base[:, j], problem['bounds'][j], num_resamples, conf_level)
if print_to_console:
print("%s %f %f %f %f" % (
problem['names'][j], S['vi'][j], S['vi_std'][j], S['dgsm'][j], S['dgsm_conf'][j]))
return S
|
def function[analyze, parameter[problem, X, Y, num_resamples, conf_level, print_to_console, seed]]:
constant[Calculates Derivative-based Global Sensitivity Measure on model outputs.
Returns a dictionary with keys 'vi', 'vi_std', 'dgsm', and 'dgsm_conf',
where each entry is a list of size D (the number of parameters) containing
the indices in the same order as the parameter file.
Parameters
----------
problem : dict
The problem definition
X : numpy.matrix
The NumPy matrix containing the model inputs
Y : numpy.array
The NumPy array containing the model outputs
num_resamples : int
The number of resamples used to compute the confidence
intervals (default 1000)
conf_level : float
The confidence interval level (default 0.95)
print_to_console : bool
Print results directly to console (default False)
References
----------
.. [1] Sobol, I. M. and S. Kucherenko (2009). "Derivative based global
sensitivity measures and their link with global sensitivity
indices." Mathematics and Computers in Simulation, 79(10):3009-3017,
doi:10.1016/j.matcom.2009.01.023.
]
if name[seed] begin[:]
call[name[np].random.seed, parameter[name[seed]]]
variable[D] assign[=] call[name[problem]][constant[num_vars]]
if compare[binary_operation[name[Y].size <ast.Mod object at 0x7da2590d6920> binary_operation[name[D] + constant[1]]] equal[==] constant[0]] begin[:]
variable[N] assign[=] call[name[int], parameter[binary_operation[name[Y].size / binary_operation[name[D] + constant[1]]]]]
if <ast.UnaryOp object at 0x7da1b163c430> begin[:]
<ast.Raise object at 0x7da1b163fbe0>
variable[base] assign[=] call[name[np].zeros, parameter[name[N]]]
variable[X_base] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b163fd00>, <ast.Name object at 0x7da1b163fb20>]]]]
variable[perturbed] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1835360>, <ast.Name object at 0x7da1b1834df0>]]]]
variable[X_perturbed] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1837340>, <ast.Name object at 0x7da1b1837d60>]]]]
variable[step] assign[=] binary_operation[name[D] + constant[1]]
variable[base] assign[=] call[name[Y]][<ast.Slice object at 0x7da1b1837370>]
variable[X_base] assign[=] call[name[X]][tuple[[<ast.Slice object at 0x7da1b1834130>, <ast.Slice object at 0x7da1b1834400>]]]
for taget[name[j]] in starred[call[name[range], parameter[name[D]]]] begin[:]
call[name[perturbed]][tuple[[<ast.Slice object at 0x7da1b1837ac0>, <ast.Name object at 0x7da1b1837610>]]] assign[=] call[name[Y]][<ast.Slice object at 0x7da1b18378e0>]
call[name[X_perturbed]][tuple[[<ast.Slice object at 0x7da1b18346a0>, <ast.Name object at 0x7da1b1834850>]]] assign[=] call[name[X]][tuple[[<ast.Slice object at 0x7da1b18357e0>, <ast.Name object at 0x7da1b1836680>]]]
variable[keys] assign[=] tuple[[<ast.Constant object at 0x7da1b1834550>, <ast.Constant object at 0x7da1b1834520>, <ast.Constant object at 0x7da1b18355a0>, <ast.Constant object at 0x7da1b1836860>]]
variable[S] assign[=] call[name[ResultDict], parameter[<ast.GeneratorExp object at 0x7da1b1836830>]]
call[name[S]][constant[names]] assign[=] call[name[problem]][constant[names]]
if name[print_to_console] begin[:]
call[name[print], parameter[binary_operation[constant[Parameter %s %s %s %s] <ast.Mod object at 0x7da2590d6920> name[keys]]]]
for taget[name[j]] in starred[call[name[range], parameter[name[D]]]] begin[:]
<ast.Tuple object at 0x7da1b18a0cd0> assign[=] call[name[calc_vi], parameter[name[base], call[name[perturbed]][tuple[[<ast.Slice object at 0x7da1b18a2da0>, <ast.Name object at 0x7da1b18a3ca0>]]], binary_operation[call[name[X_perturbed]][tuple[[<ast.Slice object at 0x7da1b18a2500>, <ast.Name object at 0x7da1b18a3490>]]] - call[name[X_base]][tuple[[<ast.Slice object at 0x7da1b18a2ef0>, <ast.Name object at 0x7da1b18a2200>]]]]]]
<ast.Tuple object at 0x7da1b18a1150> assign[=] call[name[calc_dgsm], parameter[name[base], call[name[perturbed]][tuple[[<ast.Slice object at 0x7da1b18a1c30>, <ast.Name object at 0x7da1b18a11e0>]]], binary_operation[call[name[X_perturbed]][tuple[[<ast.Slice object at 0x7da1b18a1030>, <ast.Name object at 0x7da1b18a0160>]]] - call[name[X_base]][tuple[[<ast.Slice object at 0x7da1b18a0190>, <ast.Name object at 0x7da1b18a1510>]]]], call[call[name[problem]][constant[bounds]]][name[j]], name[num_resamples], name[conf_level]]]
if name[print_to_console] begin[:]
call[name[print], parameter[binary_operation[constant[%s %f %f %f %f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b18a39d0>, <ast.Subscript object at 0x7da1b18a1270>, <ast.Subscript object at 0x7da1b18a24d0>, <ast.Subscript object at 0x7da1b18a2b90>, <ast.Subscript object at 0x7da1b18a37f0>]]]]]
return[name[S]]
|
keyword[def] identifier[analyze] ( identifier[problem] , identifier[X] , identifier[Y] , identifier[num_resamples] = literal[int] ,
identifier[conf_level] = literal[int] , identifier[print_to_console] = keyword[False] , identifier[seed] = keyword[None] ):
literal[string]
keyword[if] identifier[seed] :
identifier[np] . identifier[random] . identifier[seed] ( identifier[seed] )
identifier[D] = identifier[problem] [ literal[string] ]
keyword[if] identifier[Y] . identifier[size] %( identifier[D] + literal[int] )== literal[int] :
identifier[N] = identifier[int] ( identifier[Y] . identifier[size] /( identifier[D] + literal[int] ))
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[if] keyword[not] literal[int] < identifier[conf_level] < literal[int] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[base] = identifier[np] . identifier[zeros] ( identifier[N] )
identifier[X_base] = identifier[np] . identifier[zeros] (( identifier[N] , identifier[D] ))
identifier[perturbed] = identifier[np] . identifier[zeros] (( identifier[N] , identifier[D] ))
identifier[X_perturbed] = identifier[np] . identifier[zeros] (( identifier[N] , identifier[D] ))
identifier[step] = identifier[D] + literal[int]
identifier[base] = identifier[Y] [ literal[int] : identifier[Y] . identifier[size] : identifier[step] ]
identifier[X_base] = identifier[X] [ literal[int] : identifier[Y] . identifier[size] : identifier[step] ,:]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[D] ):
identifier[perturbed] [:, identifier[j] ]= identifier[Y] [( identifier[j] + literal[int] ): identifier[Y] . identifier[size] : identifier[step] ]
identifier[X_perturbed] [:, identifier[j] ]= identifier[X] [( identifier[j] + literal[int] ): identifier[Y] . identifier[size] : identifier[step] , identifier[j] ]
identifier[keys] =( literal[string] , literal[string] , literal[string] , literal[string] )
identifier[S] = identifier[ResultDict] (( identifier[k] , identifier[np] . identifier[zeros] ( identifier[D] )) keyword[for] identifier[k] keyword[in] identifier[keys] )
identifier[S] [ literal[string] ]= identifier[problem] [ literal[string] ]
keyword[if] identifier[print_to_console] :
identifier[print] ( literal[string] % identifier[keys] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[D] ):
identifier[S] [ literal[string] ][ identifier[j] ], identifier[S] [ literal[string] ][ identifier[j] ]= identifier[calc_vi] (
identifier[base] , identifier[perturbed] [:, identifier[j] ], identifier[X_perturbed] [:, identifier[j] ]- identifier[X_base] [:, identifier[j] ])
identifier[S] [ literal[string] ][ identifier[j] ], identifier[S] [ literal[string] ][ identifier[j] ]= identifier[calc_dgsm] ( identifier[base] , identifier[perturbed] [:, identifier[j] ], identifier[X_perturbed] [
:, identifier[j] ]- identifier[X_base] [:, identifier[j] ], identifier[problem] [ literal[string] ][ identifier[j] ], identifier[num_resamples] , identifier[conf_level] )
keyword[if] identifier[print_to_console] :
identifier[print] ( literal[string] %(
identifier[problem] [ literal[string] ][ identifier[j] ], identifier[S] [ literal[string] ][ identifier[j] ], identifier[S] [ literal[string] ][ identifier[j] ], identifier[S] [ literal[string] ][ identifier[j] ], identifier[S] [ literal[string] ][ identifier[j] ]))
keyword[return] identifier[S]
|
def analyze(problem, X, Y, num_resamples=1000, conf_level=0.95, print_to_console=False, seed=None):
"""Calculates Derivative-based Global Sensitivity Measure on model outputs.
Returns a dictionary with keys 'vi', 'vi_std', 'dgsm', and 'dgsm_conf',
where each entry is a list of size D (the number of parameters) containing
the indices in the same order as the parameter file.
Parameters
----------
problem : dict
The problem definition
X : numpy.matrix
The NumPy matrix containing the model inputs
Y : numpy.array
The NumPy array containing the model outputs
num_resamples : int
The number of resamples used to compute the confidence
intervals (default 1000)
conf_level : float
The confidence interval level (default 0.95)
print_to_console : bool
Print results directly to console (default False)
References
----------
.. [1] Sobol, I. M. and S. Kucherenko (2009). "Derivative based global
sensitivity measures and their link with global sensitivity
indices." Mathematics and Computers in Simulation, 79(10):3009-3017,
doi:10.1016/j.matcom.2009.01.023.
"""
if seed:
np.random.seed(seed) # depends on [control=['if'], data=[]]
D = problem['num_vars']
if Y.size % (D + 1) == 0:
N = int(Y.size / (D + 1)) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Incorrect number of samples in model output file.')
if not 0 < conf_level < 1:
raise RuntimeError('Confidence level must be between 0-1.') # depends on [control=['if'], data=[]]
base = np.zeros(N)
X_base = np.zeros((N, D))
perturbed = np.zeros((N, D))
X_perturbed = np.zeros((N, D))
step = D + 1
base = Y[0:Y.size:step]
X_base = X[0:Y.size:step, :]
for j in range(D):
perturbed[:, j] = Y[j + 1:Y.size:step]
X_perturbed[:, j] = X[j + 1:Y.size:step, j] # depends on [control=['for'], data=['j']] # First order (+conf.) and Total order (+conf.)
keys = ('vi', 'vi_std', 'dgsm', 'dgsm_conf')
S = ResultDict(((k, np.zeros(D)) for k in keys))
S['names'] = problem['names']
if print_to_console:
print('Parameter %s %s %s %s' % keys) # depends on [control=['if'], data=[]]
for j in range(D):
(S['vi'][j], S['vi_std'][j]) = calc_vi(base, perturbed[:, j], X_perturbed[:, j] - X_base[:, j])
(S['dgsm'][j], S['dgsm_conf'][j]) = calc_dgsm(base, perturbed[:, j], X_perturbed[:, j] - X_base[:, j], problem['bounds'][j], num_resamples, conf_level)
if print_to_console:
print('%s %f %f %f %f' % (problem['names'][j], S['vi'][j], S['vi_std'][j], S['dgsm'][j], S['dgsm_conf'][j])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']]
return S
|
def summarize_dist_params(dist, name, name_scope="dist_params"):
"""Summarize the parameters of a distribution.
Args:
dist: A Distribution object with mean and standard deviation
parameters.
name: The name of the distribution.
name_scope: The name scope of this summary.
"""
with tf.compat.v1.name_scope(name_scope):
tf.compat.v2.summary.histogram(
name="{}/{}".format(name, "mean"),
data=dist.mean(),
step=tf.compat.v1.train.get_or_create_global_step())
tf.compat.v2.summary.histogram(
name="{}/{}".format(name, "stddev"),
data=dist.stddev(),
step=tf.compat.v1.train.get_or_create_global_step())
|
def function[summarize_dist_params, parameter[dist, name, name_scope]]:
constant[Summarize the parameters of a distribution.
Args:
dist: A Distribution object with mean and standard deviation
parameters.
name: The name of the distribution.
name_scope: The name scope of this summary.
]
with call[name[tf].compat.v1.name_scope, parameter[name[name_scope]]] begin[:]
call[name[tf].compat.v2.summary.histogram, parameter[]]
call[name[tf].compat.v2.summary.histogram, parameter[]]
|
keyword[def] identifier[summarize_dist_params] ( identifier[dist] , identifier[name] , identifier[name_scope] = literal[string] ):
literal[string]
keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] ( identifier[name_scope] ):
identifier[tf] . identifier[compat] . identifier[v2] . identifier[summary] . identifier[histogram] (
identifier[name] = literal[string] . identifier[format] ( identifier[name] , literal[string] ),
identifier[data] = identifier[dist] . identifier[mean] (),
identifier[step] = identifier[tf] . identifier[compat] . identifier[v1] . identifier[train] . identifier[get_or_create_global_step] ())
identifier[tf] . identifier[compat] . identifier[v2] . identifier[summary] . identifier[histogram] (
identifier[name] = literal[string] . identifier[format] ( identifier[name] , literal[string] ),
identifier[data] = identifier[dist] . identifier[stddev] (),
identifier[step] = identifier[tf] . identifier[compat] . identifier[v1] . identifier[train] . identifier[get_or_create_global_step] ())
|
def summarize_dist_params(dist, name, name_scope='dist_params'):
"""Summarize the parameters of a distribution.
Args:
dist: A Distribution object with mean and standard deviation
parameters.
name: The name of the distribution.
name_scope: The name scope of this summary.
"""
with tf.compat.v1.name_scope(name_scope):
tf.compat.v2.summary.histogram(name='{}/{}'.format(name, 'mean'), data=dist.mean(), step=tf.compat.v1.train.get_or_create_global_step())
tf.compat.v2.summary.histogram(name='{}/{}'.format(name, 'stddev'), data=dist.stddev(), step=tf.compat.v1.train.get_or_create_global_step()) # depends on [control=['with'], data=[]]
|
def how_many(self):
"""
Ascertain where to start downloading, and how many entries.
"""
if self.linkdates != []:
# What follows is a quick sanity check: if the entry date is in the
# future, this is probably a mistake, and we just count the entry
# date as right now.
if max(self.linkdates) <= list(time.localtime()):
currentdate = max(self.linkdates)
else:
currentdate = list(time.localtime())
print(("This entry has its date set in the future. "
"I will use your current local time as its date "
"instead."),
file=sys.stderr, flush=True)
stop = sys.maxsize
else:
currentdate = [1, 1, 1, 0, 0]
firstsync = self.retrieve_config('firstsync', '1')
if firstsync == 'all':
stop = sys.maxsize
else:
stop = int(firstsync)
return currentdate, stop
|
def function[how_many, parameter[self]]:
constant[
Ascertain where to start downloading, and how many entries.
]
if compare[name[self].linkdates not_equal[!=] list[[]]] begin[:]
if compare[call[name[max], parameter[name[self].linkdates]] less_or_equal[<=] call[name[list], parameter[call[name[time].localtime, parameter[]]]]] begin[:]
variable[currentdate] assign[=] call[name[max], parameter[name[self].linkdates]]
variable[stop] assign[=] name[sys].maxsize
return[tuple[[<ast.Name object at 0x7da2054a5900>, <ast.Name object at 0x7da2054a6e30>]]]
|
keyword[def] identifier[how_many] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[linkdates] !=[]:
keyword[if] identifier[max] ( identifier[self] . identifier[linkdates] )<= identifier[list] ( identifier[time] . identifier[localtime] ()):
identifier[currentdate] = identifier[max] ( identifier[self] . identifier[linkdates] )
keyword[else] :
identifier[currentdate] = identifier[list] ( identifier[time] . identifier[localtime] ())
identifier[print] (( literal[string]
literal[string]
literal[string] ),
identifier[file] = identifier[sys] . identifier[stderr] , identifier[flush] = keyword[True] )
identifier[stop] = identifier[sys] . identifier[maxsize]
keyword[else] :
identifier[currentdate] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[firstsync] = identifier[self] . identifier[retrieve_config] ( literal[string] , literal[string] )
keyword[if] identifier[firstsync] == literal[string] :
identifier[stop] = identifier[sys] . identifier[maxsize]
keyword[else] :
identifier[stop] = identifier[int] ( identifier[firstsync] )
keyword[return] identifier[currentdate] , identifier[stop]
|
def how_many(self):
"""
Ascertain where to start downloading, and how many entries.
"""
if self.linkdates != []:
# What follows is a quick sanity check: if the entry date is in the
# future, this is probably a mistake, and we just count the entry
# date as right now.
if max(self.linkdates) <= list(time.localtime()):
currentdate = max(self.linkdates) # depends on [control=['if'], data=[]]
else:
currentdate = list(time.localtime())
print('This entry has its date set in the future. I will use your current local time as its date instead.', file=sys.stderr, flush=True)
stop = sys.maxsize # depends on [control=['if'], data=[]]
else:
currentdate = [1, 1, 1, 0, 0]
firstsync = self.retrieve_config('firstsync', '1')
if firstsync == 'all':
stop = sys.maxsize # depends on [control=['if'], data=[]]
else:
stop = int(firstsync)
return (currentdate, stop)
|
def apply(
self,
func,
num_splits=None,
other_axis_partition=None,
maintain_partitioning=True,
**kwargs
):
"""Applies func to the object.
See notes in Parent class about this method.
Args:
func: The function to apply.
num_splits: The number of times to split the result object.
other_axis_partition: Another `DaskFrameAxisPartition` object to apply to
func with this one.
Returns:
A list of `DaskFramePartition` objects.
"""
import dask
if num_splits is None:
num_splits = len(self.list_of_blocks)
if other_axis_partition is not None:
return [
DaskFramePartition(dask.delayed(obj))
for obj in deploy_func_between_two_axis_partitions(
self.axis,
func,
num_splits,
len(self.list_of_blocks),
kwargs,
*dask.compute(
*tuple(
self.list_of_blocks + other_axis_partition.list_of_blocks
)
)
)
]
args = [self.axis, func, num_splits, kwargs, maintain_partitioning]
args.extend(dask.compute(*self.list_of_blocks))
return [
DaskFramePartition(dask.delayed(obj)) for obj in deploy_axis_func(*args)
]
|
def function[apply, parameter[self, func, num_splits, other_axis_partition, maintain_partitioning]]:
constant[Applies func to the object.
See notes in Parent class about this method.
Args:
func: The function to apply.
num_splits: The number of times to split the result object.
other_axis_partition: Another `DaskFrameAxisPartition` object to apply to
func with this one.
Returns:
A list of `DaskFramePartition` objects.
]
import module[dask]
if compare[name[num_splits] is constant[None]] begin[:]
variable[num_splits] assign[=] call[name[len], parameter[name[self].list_of_blocks]]
if compare[name[other_axis_partition] is_not constant[None]] begin[:]
return[<ast.ListComp object at 0x7da18f00c3a0>]
variable[args] assign[=] list[[<ast.Attribute object at 0x7da18ede7c70>, <ast.Name object at 0x7da18ede4460>, <ast.Name object at 0x7da18ede5b10>, <ast.Name object at 0x7da18ede50c0>, <ast.Name object at 0x7da18ede6860>]]
call[name[args].extend, parameter[call[name[dask].compute, parameter[<ast.Starred object at 0x7da18ede6170>]]]]
return[<ast.ListComp object at 0x7da18ede4130>]
|
keyword[def] identifier[apply] (
identifier[self] ,
identifier[func] ,
identifier[num_splits] = keyword[None] ,
identifier[other_axis_partition] = keyword[None] ,
identifier[maintain_partitioning] = keyword[True] ,
** identifier[kwargs]
):
literal[string]
keyword[import] identifier[dask]
keyword[if] identifier[num_splits] keyword[is] keyword[None] :
identifier[num_splits] = identifier[len] ( identifier[self] . identifier[list_of_blocks] )
keyword[if] identifier[other_axis_partition] keyword[is] keyword[not] keyword[None] :
keyword[return] [
identifier[DaskFramePartition] ( identifier[dask] . identifier[delayed] ( identifier[obj] ))
keyword[for] identifier[obj] keyword[in] identifier[deploy_func_between_two_axis_partitions] (
identifier[self] . identifier[axis] ,
identifier[func] ,
identifier[num_splits] ,
identifier[len] ( identifier[self] . identifier[list_of_blocks] ),
identifier[kwargs] ,
* identifier[dask] . identifier[compute] (
* identifier[tuple] (
identifier[self] . identifier[list_of_blocks] + identifier[other_axis_partition] . identifier[list_of_blocks]
)
)
)
]
identifier[args] =[ identifier[self] . identifier[axis] , identifier[func] , identifier[num_splits] , identifier[kwargs] , identifier[maintain_partitioning] ]
identifier[args] . identifier[extend] ( identifier[dask] . identifier[compute] (* identifier[self] . identifier[list_of_blocks] ))
keyword[return] [
identifier[DaskFramePartition] ( identifier[dask] . identifier[delayed] ( identifier[obj] )) keyword[for] identifier[obj] keyword[in] identifier[deploy_axis_func] (* identifier[args] )
]
|
def apply(self, func, num_splits=None, other_axis_partition=None, maintain_partitioning=True, **kwargs):
"""Applies func to the object.
See notes in Parent class about this method.
Args:
func: The function to apply.
num_splits: The number of times to split the result object.
other_axis_partition: Another `DaskFrameAxisPartition` object to apply to
func with this one.
Returns:
A list of `DaskFramePartition` objects.
"""
import dask
if num_splits is None:
num_splits = len(self.list_of_blocks) # depends on [control=['if'], data=['num_splits']]
if other_axis_partition is not None:
return [DaskFramePartition(dask.delayed(obj)) for obj in deploy_func_between_two_axis_partitions(self.axis, func, num_splits, len(self.list_of_blocks), kwargs, *dask.compute(*tuple(self.list_of_blocks + other_axis_partition.list_of_blocks)))] # depends on [control=['if'], data=['other_axis_partition']]
args = [self.axis, func, num_splits, kwargs, maintain_partitioning]
args.extend(dask.compute(*self.list_of_blocks))
return [DaskFramePartition(dask.delayed(obj)) for obj in deploy_axis_func(*args)]
|
def main():
"""
NAME
cart_dir.py
DESCRIPTION
converts cartesian coordinates to geomagnetic elements
INPUT (COMMAND LINE ENTRY)
x1 x2 x3
if only two columns, assumes magnitude of unity
OUTPUT
declination inclination magnitude
SYNTAX
cart_dir.py [command line options] [< filename]
OPTIONS
-h prints help message and quits
-i for interactive data entry
-f FILE to specify input filename
-F OFILE to specify output filename (also prints to screen)
"""
ofile=""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-F' in sys.argv:
ind=sys.argv.index('-F')
ofile=sys.argv[ind+1]
outfile=open(ofile,'w')
if '-i' in sys.argv:
cont=1
while cont==1:
cart=[]
try:
ans=input('X: [ctrl-D to quit] ')
cart.append(float(ans))
ans=input('Y: ')
cart.append(float(ans))
ans=input('Z: ')
cart.append(float(ans))
except:
print("\n Good-bye \n")
sys.exit()
dir= pmag.cart2dir(cart) # send dir to dir2cart and spit out result
print('%7.1f %7.1f %10.3e'%(dir[0],dir[1],dir[2]))
elif '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
inp=numpy.loadtxt(file) # read from a file
else:
inp = numpy.loadtxt(sys.stdin,dtype=numpy.float) # read from standard input
dir=pmag.cart2dir(inp)
if len(dir.shape)==1:
line=dir
print('%7.1f %7.1f %10.3e'%(line[0],line[1],line[2]))
if ofile!="":
outstring='%7.1f %7.1f %10.8e\n' %(line[0],line[1],line[2])
outfile.write(outstring)
else:
for line in dir:
print('%7.1f %7.1f %10.3e'%(line[0],line[1],line[2]))
if ofile!="":
outstring='%7.1f %7.1f %10.8e\n' %(line[0],line[1],line[2])
outfile.write(outstring)
|
def function[main, parameter[]]:
constant[
NAME
cart_dir.py
DESCRIPTION
converts cartesian coordinates to geomagnetic elements
INPUT (COMMAND LINE ENTRY)
x1 x2 x3
if only two columns, assumes magnitude of unity
OUTPUT
declination inclination magnitude
SYNTAX
cart_dir.py [command line options] [< filename]
OPTIONS
-h prints help message and quits
-i for interactive data entry
-f FILE to specify input filename
-F OFILE to specify output filename (also prints to screen)
]
variable[ofile] assign[=] constant[]
if compare[constant[-h] in name[sys].argv] begin[:]
call[name[print], parameter[name[main].__doc__]]
call[name[sys].exit, parameter[]]
if compare[constant[-F] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-F]]]
variable[ofile] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
variable[outfile] assign[=] call[name[open], parameter[name[ofile], constant[w]]]
if compare[constant[-i] in name[sys].argv] begin[:]
variable[cont] assign[=] constant[1]
while compare[name[cont] equal[==] constant[1]] begin[:]
variable[cart] assign[=] list[[]]
<ast.Try object at 0x7da1b022fdc0>
variable[dir] assign[=] call[name[pmag].cart2dir, parameter[name[cart]]]
call[name[print], parameter[binary_operation[constant[%7.1f %7.1f %10.3e] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b02ebb80>, <ast.Subscript object at 0x7da1b02eb340>, <ast.Subscript object at 0x7da1b02e8a00>]]]]]
variable[dir] assign[=] call[name[pmag].cart2dir, parameter[name[inp]]]
if compare[call[name[len], parameter[name[dir].shape]] equal[==] constant[1]] begin[:]
variable[line] assign[=] name[dir]
call[name[print], parameter[binary_operation[constant[%7.1f %7.1f %10.3e] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b02e9990>, <ast.Subscript object at 0x7da1b02e96f0>, <ast.Subscript object at 0x7da1b02e9960>]]]]]
if compare[name[ofile] not_equal[!=] constant[]] begin[:]
variable[outstring] assign[=] binary_operation[constant[%7.1f %7.1f %10.8e
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b02e9f60>, <ast.Subscript object at 0x7da1b02ea0b0>, <ast.Subscript object at 0x7da1b02e9540>]]]
call[name[outfile].write, parameter[name[outstring]]]
|
keyword[def] identifier[main] ():
literal[string]
identifier[ofile] = literal[string]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[print] ( identifier[main] . identifier[__doc__] )
identifier[sys] . identifier[exit] ()
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[ofile] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
identifier[outfile] = identifier[open] ( identifier[ofile] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[cont] = literal[int]
keyword[while] identifier[cont] == literal[int] :
identifier[cart] =[]
keyword[try] :
identifier[ans] = identifier[input] ( literal[string] )
identifier[cart] . identifier[append] ( identifier[float] ( identifier[ans] ))
identifier[ans] = identifier[input] ( literal[string] )
identifier[cart] . identifier[append] ( identifier[float] ( identifier[ans] ))
identifier[ans] = identifier[input] ( literal[string] )
identifier[cart] . identifier[append] ( identifier[float] ( identifier[ans] ))
keyword[except] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ()
identifier[dir] = identifier[pmag] . identifier[cart2dir] ( identifier[cart] )
identifier[print] ( literal[string] %( identifier[dir] [ literal[int] ], identifier[dir] [ literal[int] ], identifier[dir] [ literal[int] ]))
keyword[elif] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[file] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
identifier[inp] = identifier[numpy] . identifier[loadtxt] ( identifier[file] )
keyword[else] :
identifier[inp] = identifier[numpy] . identifier[loadtxt] ( identifier[sys] . identifier[stdin] , identifier[dtype] = identifier[numpy] . identifier[float] )
identifier[dir] = identifier[pmag] . identifier[cart2dir] ( identifier[inp] )
keyword[if] identifier[len] ( identifier[dir] . identifier[shape] )== literal[int] :
identifier[line] = identifier[dir]
identifier[print] ( literal[string] %( identifier[line] [ literal[int] ], identifier[line] [ literal[int] ], identifier[line] [ literal[int] ]))
keyword[if] identifier[ofile] != literal[string] :
identifier[outstring] = literal[string] %( identifier[line] [ literal[int] ], identifier[line] [ literal[int] ], identifier[line] [ literal[int] ])
identifier[outfile] . identifier[write] ( identifier[outstring] )
keyword[else] :
keyword[for] identifier[line] keyword[in] identifier[dir] :
identifier[print] ( literal[string] %( identifier[line] [ literal[int] ], identifier[line] [ literal[int] ], identifier[line] [ literal[int] ]))
keyword[if] identifier[ofile] != literal[string] :
identifier[outstring] = literal[string] %( identifier[line] [ literal[int] ], identifier[line] [ literal[int] ], identifier[line] [ literal[int] ])
identifier[outfile] . identifier[write] ( identifier[outstring] )
|
def main():
"""
NAME
cart_dir.py
DESCRIPTION
converts cartesian coordinates to geomagnetic elements
INPUT (COMMAND LINE ENTRY)
x1 x2 x3
if only two columns, assumes magnitude of unity
OUTPUT
declination inclination magnitude
SYNTAX
cart_dir.py [command line options] [< filename]
OPTIONS
-h prints help message and quits
-i for interactive data entry
-f FILE to specify input filename
-F OFILE to specify output filename (also prints to screen)
"""
ofile = ''
if '-h' in sys.argv:
print(main.__doc__)
sys.exit() # depends on [control=['if'], data=[]]
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile = sys.argv[ind + 1]
outfile = open(ofile, 'w') # depends on [control=['if'], data=[]]
if '-i' in sys.argv:
cont = 1
while cont == 1:
cart = []
try:
ans = input('X: [ctrl-D to quit] ')
cart.append(float(ans))
ans = input('Y: ')
cart.append(float(ans))
ans = input('Z: ')
cart.append(float(ans)) # depends on [control=['try'], data=[]]
except:
print('\n Good-bye \n')
sys.exit() # depends on [control=['except'], data=[]]
dir = pmag.cart2dir(cart) # send dir to dir2cart and spit out result
print('%7.1f %7.1f %10.3e' % (dir[0], dir[1], dir[2])) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
elif '-f' in sys.argv:
ind = sys.argv.index('-f')
file = sys.argv[ind + 1]
inp = numpy.loadtxt(file) # read from a file # depends on [control=['if'], data=[]]
else:
inp = numpy.loadtxt(sys.stdin, dtype=numpy.float) # read from standard input
dir = pmag.cart2dir(inp)
if len(dir.shape) == 1:
line = dir
print('%7.1f %7.1f %10.3e' % (line[0], line[1], line[2]))
if ofile != '':
outstring = '%7.1f %7.1f %10.8e\n' % (line[0], line[1], line[2])
outfile.write(outstring) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
for line in dir:
print('%7.1f %7.1f %10.3e' % (line[0], line[1], line[2]))
if ofile != '':
outstring = '%7.1f %7.1f %10.8e\n' % (line[0], line[1], line[2])
outfile.write(outstring) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
|
def __is_file_to_be_busted(self, filepath):
"""
:param filepath:
:return: True or False
"""
if not self.extensions:
return True
return Path(filepath).suffix in self.extensions if filepath else False
|
def function[__is_file_to_be_busted, parameter[self, filepath]]:
constant[
:param filepath:
:return: True or False
]
if <ast.UnaryOp object at 0x7da1b25f6380> begin[:]
return[constant[True]]
return[<ast.IfExp object at 0x7da1b25f62f0>]
|
keyword[def] identifier[__is_file_to_be_busted] ( identifier[self] , identifier[filepath] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[extensions] :
keyword[return] keyword[True]
keyword[return] identifier[Path] ( identifier[filepath] ). identifier[suffix] keyword[in] identifier[self] . identifier[extensions] keyword[if] identifier[filepath] keyword[else] keyword[False]
|
def __is_file_to_be_busted(self, filepath):
"""
:param filepath:
:return: True or False
"""
if not self.extensions:
return True # depends on [control=['if'], data=[]]
return Path(filepath).suffix in self.extensions if filepath else False
|
def transDrift(length=0.0, gamma=None):
""" Transport matrix of drift
:param length: drift length in [m]
:param gamma: electron energy, gamma value
:return: 6x6 numpy array
"""
m = np.eye(6, 6, dtype=np.float64)
if length == 0.0:
print("warning: 'length' should be a positive float number.")
elif gamma is not None and gamma != 0.0:
m[0, 1] = m[2, 3] = length
m[4, 5] = float(length) / gamma / gamma
else:
print("warning: 'gamma' should be a positive float number.")
return m
|
def function[transDrift, parameter[length, gamma]]:
constant[ Transport matrix of drift
:param length: drift length in [m]
:param gamma: electron energy, gamma value
:return: 6x6 numpy array
]
variable[m] assign[=] call[name[np].eye, parameter[constant[6], constant[6]]]
if compare[name[length] equal[==] constant[0.0]] begin[:]
call[name[print], parameter[constant[warning: 'length' should be a positive float number.]]]
return[name[m]]
|
keyword[def] identifier[transDrift] ( identifier[length] = literal[int] , identifier[gamma] = keyword[None] ):
literal[string]
identifier[m] = identifier[np] . identifier[eye] ( literal[int] , literal[int] , identifier[dtype] = identifier[np] . identifier[float64] )
keyword[if] identifier[length] == literal[int] :
identifier[print] ( literal[string] )
keyword[elif] identifier[gamma] keyword[is] keyword[not] keyword[None] keyword[and] identifier[gamma] != literal[int] :
identifier[m] [ literal[int] , literal[int] ]= identifier[m] [ literal[int] , literal[int] ]= identifier[length]
identifier[m] [ literal[int] , literal[int] ]= identifier[float] ( identifier[length] )/ identifier[gamma] / identifier[gamma]
keyword[else] :
identifier[print] ( literal[string] )
keyword[return] identifier[m]
|
def transDrift(length=0.0, gamma=None):
""" Transport matrix of drift
:param length: drift length in [m]
:param gamma: electron energy, gamma value
:return: 6x6 numpy array
"""
m = np.eye(6, 6, dtype=np.float64)
if length == 0.0:
print("warning: 'length' should be a positive float number.") # depends on [control=['if'], data=[]]
elif gamma is not None and gamma != 0.0:
m[0, 1] = m[2, 3] = length
m[4, 5] = float(length) / gamma / gamma # depends on [control=['if'], data=[]]
else:
print("warning: 'gamma' should be a positive float number.")
return m
|
def serialize(self):
"""This function serialize into a simple dict object.
It is used when transferring data to other daemons over the network (http)
Here we directly return all attributes
:return: json representation of a Timeperiod
:rtype: dict
"""
res = super(Notification, self).serialize()
if res['command_call'] is not None:
if not isinstance(res['command_call'], string_types) and \
not isinstance(res['command_call'], dict):
res['command_call'] = res['command_call'].serialize()
return res
|
def function[serialize, parameter[self]]:
constant[This function serialize into a simple dict object.
It is used when transferring data to other daemons over the network (http)
Here we directly return all attributes
:return: json representation of a Timeperiod
:rtype: dict
]
variable[res] assign[=] call[call[name[super], parameter[name[Notification], name[self]]].serialize, parameter[]]
if compare[call[name[res]][constant[command_call]] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da18dc052a0> begin[:]
call[name[res]][constant[command_call]] assign[=] call[call[name[res]][constant[command_call]].serialize, parameter[]]
return[name[res]]
|
keyword[def] identifier[serialize] ( identifier[self] ):
literal[string]
identifier[res] = identifier[super] ( identifier[Notification] , identifier[self] ). identifier[serialize] ()
keyword[if] identifier[res] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[res] [ literal[string] ], identifier[string_types] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[res] [ literal[string] ], identifier[dict] ):
identifier[res] [ literal[string] ]= identifier[res] [ literal[string] ]. identifier[serialize] ()
keyword[return] identifier[res]
|
def serialize(self):
"""This function serialize into a simple dict object.
It is used when transferring data to other daemons over the network (http)
Here we directly return all attributes
:return: json representation of a Timeperiod
:rtype: dict
"""
res = super(Notification, self).serialize()
if res['command_call'] is not None:
if not isinstance(res['command_call'], string_types) and (not isinstance(res['command_call'], dict)):
res['command_call'] = res['command_call'].serialize() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return res
|
def delete_host_from_segment(hostipaddress, networkaddress, auth, url):
"""
:param hostipaddress: str ipv4 address of the target host to be deleted
:param networkaddress: ipv4 network address + subnet bits of target scope
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: String of HTTP response code. Should be 204 is successfull
:rtype: str
"""
host_id = get_host_id(hostipaddress, networkaddress, auth, url)
delete_host = remove_scope_ip(host_id, auth, url)
return delete_host
|
def function[delete_host_from_segment, parameter[hostipaddress, networkaddress, auth, url]]:
constant[
:param hostipaddress: str ipv4 address of the target host to be deleted
:param networkaddress: ipv4 network address + subnet bits of target scope
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: String of HTTP response code. Should be 204 is successfull
:rtype: str
]
variable[host_id] assign[=] call[name[get_host_id], parameter[name[hostipaddress], name[networkaddress], name[auth], name[url]]]
variable[delete_host] assign[=] call[name[remove_scope_ip], parameter[name[host_id], name[auth], name[url]]]
return[name[delete_host]]
|
keyword[def] identifier[delete_host_from_segment] ( identifier[hostipaddress] , identifier[networkaddress] , identifier[auth] , identifier[url] ):
literal[string]
identifier[host_id] = identifier[get_host_id] ( identifier[hostipaddress] , identifier[networkaddress] , identifier[auth] , identifier[url] )
identifier[delete_host] = identifier[remove_scope_ip] ( identifier[host_id] , identifier[auth] , identifier[url] )
keyword[return] identifier[delete_host]
|
def delete_host_from_segment(hostipaddress, networkaddress, auth, url):
"""
:param hostipaddress: str ipv4 address of the target host to be deleted
:param networkaddress: ipv4 network address + subnet bits of target scope
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: String of HTTP response code. Should be 204 is successfull
:rtype: str
"""
host_id = get_host_id(hostipaddress, networkaddress, auth, url)
delete_host = remove_scope_ip(host_id, auth, url)
return delete_host
|
def _set_node_hw_sync_state(self, v, load=False):
"""
Setter method for node_hw_sync_state, mapped from YANG variable /brocade_vcs_rpc/show_vcs/output/vcs_nodes/vcs_node_info/node_hw_sync_state (node-hw-sync-state-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_node_hw_sync_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_node_hw_sync_state() directly.
YANG Description: Node hardware synchronization state
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'node-in-sync': {'value': 4}, u'node-uninitialized': {'value': 2}, u'node-unknown': {'value': 1}, u'node-synchronizing': {'value': 3}, u'node-out-of-sync': {'value': 5}},), is_leaf=True, yang_name="node-hw-sync-state", rest_name="node-hw-sync-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='node-hw-sync-state-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """node_hw_sync_state must be of a type compatible with node-hw-sync-state-type""",
'defined-type': "brocade-vcs:node-hw-sync-state-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'node-in-sync': {'value': 4}, u'node-uninitialized': {'value': 2}, u'node-unknown': {'value': 1}, u'node-synchronizing': {'value': 3}, u'node-out-of-sync': {'value': 5}},), is_leaf=True, yang_name="node-hw-sync-state", rest_name="node-hw-sync-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='node-hw-sync-state-type', is_config=True)""",
})
self.__node_hw_sync_state = t
if hasattr(self, '_set'):
self._set()
|
def function[_set_node_hw_sync_state, parameter[self, v, load]]:
constant[
Setter method for node_hw_sync_state, mapped from YANG variable /brocade_vcs_rpc/show_vcs/output/vcs_nodes/vcs_node_info/node_hw_sync_state (node-hw-sync-state-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_node_hw_sync_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_node_hw_sync_state() directly.
YANG Description: Node hardware synchronization state
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da2041d9f00>
name[self].__node_hw_sync_state assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_node_hw_sync_state] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_type] = literal[string] , identifier[restriction_arg] ={ literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }},), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__node_hw_sync_state] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_node_hw_sync_state(self, v, load=False):
"""
Setter method for node_hw_sync_state, mapped from YANG variable /brocade_vcs_rpc/show_vcs/output/vcs_nodes/vcs_node_info/node_hw_sync_state (node-hw-sync-state-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_node_hw_sync_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_node_hw_sync_state() directly.
YANG Description: Node hardware synchronization state
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type='dict_key', restriction_arg={u'node-in-sync': {'value': 4}, u'node-uninitialized': {'value': 2}, u'node-unknown': {'value': 1}, u'node-synchronizing': {'value': 3}, u'node-out-of-sync': {'value': 5}}), is_leaf=True, yang_name='node-hw-sync-state', rest_name='node-hw-sync-state', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-vcs', defining_module='brocade-vcs', yang_type='node-hw-sync-state-type', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'node_hw_sync_state must be of a type compatible with node-hw-sync-state-type', 'defined-type': 'brocade-vcs:node-hw-sync-state-type', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u\'node-in-sync\': {\'value\': 4}, u\'node-uninitialized\': {\'value\': 2}, u\'node-unknown\': {\'value\': 1}, u\'node-synchronizing\': {\'value\': 3}, u\'node-out-of-sync\': {\'value\': 5}},), is_leaf=True, yang_name="node-hw-sync-state", rest_name="node-hw-sync-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace=\'urn:brocade.com:mgmt:brocade-vcs\', defining_module=\'brocade-vcs\', yang_type=\'node-hw-sync-state-type\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__node_hw_sync_state = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def expand(self, tags, clique_scoring_func=None):
"""This is the main function to expand tags into cliques
Args:
tags (list): a list of tags to find the cliques.
clique_scoring_func (func): a function that returns a float
value for the clique
Returns:
list : a list of cliques
"""
lattice = Lattice()
overlapping_spans = []
def end_token_index():
return max([t.get('end_token') for t in overlapping_spans])
for i in xrange(len(tags)):
tag = tags[i]
if len(overlapping_spans) > 0 and end_token_index() >= tag.get('start_token'):
overlapping_spans.append(tag)
elif len(overlapping_spans) > 1:
cliques = list(self._sub_expand(overlapping_spans))
if clique_scoring_func:
cliques = sorted(cliques, key=lambda e: -1 * clique_scoring_func(e))
lattice.append(cliques)
overlapping_spans = [tag]
else:
lattice.append(overlapping_spans)
overlapping_spans = [tag]
if len(overlapping_spans) > 1:
cliques = list(self._sub_expand(overlapping_spans))
if clique_scoring_func:
cliques = sorted(cliques, key=lambda e: -1 * clique_scoring_func(e))
lattice.append(cliques)
else:
lattice.append(overlapping_spans)
return lattice.traverse()
|
def function[expand, parameter[self, tags, clique_scoring_func]]:
constant[This is the main function to expand tags into cliques
Args:
tags (list): a list of tags to find the cliques.
clique_scoring_func (func): a function that returns a float
value for the clique
Returns:
list : a list of cliques
]
variable[lattice] assign[=] call[name[Lattice], parameter[]]
variable[overlapping_spans] assign[=] list[[]]
def function[end_token_index, parameter[]]:
return[call[name[max], parameter[<ast.ListComp object at 0x7da1b0881720>]]]
for taget[name[i]] in starred[call[name[xrange], parameter[call[name[len], parameter[name[tags]]]]]] begin[:]
variable[tag] assign[=] call[name[tags]][name[i]]
if <ast.BoolOp object at 0x7da1b0779f60> begin[:]
call[name[overlapping_spans].append, parameter[name[tag]]]
if compare[call[name[len], parameter[name[overlapping_spans]]] greater[>] constant[1]] begin[:]
variable[cliques] assign[=] call[name[list], parameter[call[name[self]._sub_expand, parameter[name[overlapping_spans]]]]]
if name[clique_scoring_func] begin[:]
variable[cliques] assign[=] call[name[sorted], parameter[name[cliques]]]
call[name[lattice].append, parameter[name[cliques]]]
return[call[name[lattice].traverse, parameter[]]]
|
keyword[def] identifier[expand] ( identifier[self] , identifier[tags] , identifier[clique_scoring_func] = keyword[None] ):
literal[string]
identifier[lattice] = identifier[Lattice] ()
identifier[overlapping_spans] =[]
keyword[def] identifier[end_token_index] ():
keyword[return] identifier[max] ([ identifier[t] . identifier[get] ( literal[string] ) keyword[for] identifier[t] keyword[in] identifier[overlapping_spans] ])
keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[len] ( identifier[tags] )):
identifier[tag] = identifier[tags] [ identifier[i] ]
keyword[if] identifier[len] ( identifier[overlapping_spans] )> literal[int] keyword[and] identifier[end_token_index] ()>= identifier[tag] . identifier[get] ( literal[string] ):
identifier[overlapping_spans] . identifier[append] ( identifier[tag] )
keyword[elif] identifier[len] ( identifier[overlapping_spans] )> literal[int] :
identifier[cliques] = identifier[list] ( identifier[self] . identifier[_sub_expand] ( identifier[overlapping_spans] ))
keyword[if] identifier[clique_scoring_func] :
identifier[cliques] = identifier[sorted] ( identifier[cliques] , identifier[key] = keyword[lambda] identifier[e] :- literal[int] * identifier[clique_scoring_func] ( identifier[e] ))
identifier[lattice] . identifier[append] ( identifier[cliques] )
identifier[overlapping_spans] =[ identifier[tag] ]
keyword[else] :
identifier[lattice] . identifier[append] ( identifier[overlapping_spans] )
identifier[overlapping_spans] =[ identifier[tag] ]
keyword[if] identifier[len] ( identifier[overlapping_spans] )> literal[int] :
identifier[cliques] = identifier[list] ( identifier[self] . identifier[_sub_expand] ( identifier[overlapping_spans] ))
keyword[if] identifier[clique_scoring_func] :
identifier[cliques] = identifier[sorted] ( identifier[cliques] , identifier[key] = keyword[lambda] identifier[e] :- literal[int] * identifier[clique_scoring_func] ( identifier[e] ))
identifier[lattice] . identifier[append] ( identifier[cliques] )
keyword[else] :
identifier[lattice] . identifier[append] ( identifier[overlapping_spans] )
keyword[return] identifier[lattice] . identifier[traverse] ()
|
def expand(self, tags, clique_scoring_func=None):
"""This is the main function to expand tags into cliques
Args:
tags (list): a list of tags to find the cliques.
clique_scoring_func (func): a function that returns a float
value for the clique
Returns:
list : a list of cliques
"""
lattice = Lattice()
overlapping_spans = []
def end_token_index():
return max([t.get('end_token') for t in overlapping_spans])
for i in xrange(len(tags)):
tag = tags[i]
if len(overlapping_spans) > 0 and end_token_index() >= tag.get('start_token'):
overlapping_spans.append(tag) # depends on [control=['if'], data=[]]
elif len(overlapping_spans) > 1:
cliques = list(self._sub_expand(overlapping_spans))
if clique_scoring_func:
cliques = sorted(cliques, key=lambda e: -1 * clique_scoring_func(e)) # depends on [control=['if'], data=[]]
lattice.append(cliques)
overlapping_spans = [tag] # depends on [control=['if'], data=[]]
else:
lattice.append(overlapping_spans)
overlapping_spans = [tag] # depends on [control=['for'], data=['i']]
if len(overlapping_spans) > 1:
cliques = list(self._sub_expand(overlapping_spans))
if clique_scoring_func:
cliques = sorted(cliques, key=lambda e: -1 * clique_scoring_func(e)) # depends on [control=['if'], data=[]]
lattice.append(cliques) # depends on [control=['if'], data=[]]
else:
lattice.append(overlapping_spans)
return lattice.traverse()
|
def parse(self, pointer):
"""parse pointer into tokens"""
if isinstance(pointer, Pointer):
return pointer.tokens[:]
elif pointer == '':
return []
tokens = []
staged, _, children = pointer.partition('/')
if staged:
try:
token = StagesToken(staged)
token.last = False
tokens.append(token)
except ValueError:
raise ParseError('pointer must start with / or int', pointer)
if _:
for part in children.split('/'):
part = part.replace('~1', '/')
part = part.replace('~0', '~')
token = ChildToken(part)
token.last = False
tokens.append(token)
return tokens
|
def function[parse, parameter[self, pointer]]:
constant[parse pointer into tokens]
if call[name[isinstance], parameter[name[pointer], name[Pointer]]] begin[:]
return[call[name[pointer].tokens][<ast.Slice object at 0x7da1b236c580>]]
variable[tokens] assign[=] list[[]]
<ast.Tuple object at 0x7da1b236c730> assign[=] call[name[pointer].partition, parameter[constant[/]]]
if name[staged] begin[:]
<ast.Try object at 0x7da1b236c460>
if name[_] begin[:]
for taget[name[part]] in starred[call[name[children].split, parameter[constant[/]]]] begin[:]
variable[part] assign[=] call[name[part].replace, parameter[constant[~1], constant[/]]]
variable[part] assign[=] call[name[part].replace, parameter[constant[~0], constant[~]]]
variable[token] assign[=] call[name[ChildToken], parameter[name[part]]]
name[token].last assign[=] constant[False]
call[name[tokens].append, parameter[name[token]]]
return[name[tokens]]
|
keyword[def] identifier[parse] ( identifier[self] , identifier[pointer] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[pointer] , identifier[Pointer] ):
keyword[return] identifier[pointer] . identifier[tokens] [:]
keyword[elif] identifier[pointer] == literal[string] :
keyword[return] []
identifier[tokens] =[]
identifier[staged] , identifier[_] , identifier[children] = identifier[pointer] . identifier[partition] ( literal[string] )
keyword[if] identifier[staged] :
keyword[try] :
identifier[token] = identifier[StagesToken] ( identifier[staged] )
identifier[token] . identifier[last] = keyword[False]
identifier[tokens] . identifier[append] ( identifier[token] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ParseError] ( literal[string] , identifier[pointer] )
keyword[if] identifier[_] :
keyword[for] identifier[part] keyword[in] identifier[children] . identifier[split] ( literal[string] ):
identifier[part] = identifier[part] . identifier[replace] ( literal[string] , literal[string] )
identifier[part] = identifier[part] . identifier[replace] ( literal[string] , literal[string] )
identifier[token] = identifier[ChildToken] ( identifier[part] )
identifier[token] . identifier[last] = keyword[False]
identifier[tokens] . identifier[append] ( identifier[token] )
keyword[return] identifier[tokens]
|
def parse(self, pointer):
"""parse pointer into tokens"""
if isinstance(pointer, Pointer):
return pointer.tokens[:] # depends on [control=['if'], data=[]]
elif pointer == '':
return [] # depends on [control=['if'], data=[]]
tokens = []
(staged, _, children) = pointer.partition('/')
if staged:
try:
token = StagesToken(staged)
token.last = False
tokens.append(token) # depends on [control=['try'], data=[]]
except ValueError:
raise ParseError('pointer must start with / or int', pointer) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if _:
for part in children.split('/'):
part = part.replace('~1', '/')
part = part.replace('~0', '~')
token = ChildToken(part)
token.last = False
tokens.append(token) # depends on [control=['for'], data=['part']] # depends on [control=['if'], data=[]]
return tokens
|
def swap_word_order(source):
""" Swap the order of the words in 'source' bitstring """
assert len(source) % 4 == 0
words = "I" * (len(source) // 4)
return struct.pack(words, *reversed(struct.unpack(words, source)))
|
def function[swap_word_order, parameter[source]]:
constant[ Swap the order of the words in 'source' bitstring ]
assert[compare[binary_operation[call[name[len], parameter[name[source]]] <ast.Mod object at 0x7da2590d6920> constant[4]] equal[==] constant[0]]]
variable[words] assign[=] binary_operation[constant[I] * binary_operation[call[name[len], parameter[name[source]]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[4]]]
return[call[name[struct].pack, parameter[name[words], <ast.Starred object at 0x7da1b2345000>]]]
|
keyword[def] identifier[swap_word_order] ( identifier[source] ):
literal[string]
keyword[assert] identifier[len] ( identifier[source] )% literal[int] == literal[int]
identifier[words] = literal[string] *( identifier[len] ( identifier[source] )// literal[int] )
keyword[return] identifier[struct] . identifier[pack] ( identifier[words] ,* identifier[reversed] ( identifier[struct] . identifier[unpack] ( identifier[words] , identifier[source] )))
|
def swap_word_order(source):
""" Swap the order of the words in 'source' bitstring """
assert len(source) % 4 == 0
words = 'I' * (len(source) // 4)
return struct.pack(words, *reversed(struct.unpack(words, source)))
|
def pad(self, start_duration=0.0, end_duration=0.0):
'''Add silence to the beginning or end of a file.
Calling this with the default arguments has no effect.
Parameters
----------
start_duration : float
Number of seconds of silence to add to beginning.
end_duration : float
Number of seconds of silence to add to end.
See Also
--------
delay
'''
if not is_number(start_duration) or start_duration < 0:
raise ValueError("Start duration must be a positive number.")
if not is_number(end_duration) or end_duration < 0:
raise ValueError("End duration must be positive.")
effect_args = [
'pad',
'{:f}'.format(start_duration),
'{:f}'.format(end_duration)
]
self.effects.extend(effect_args)
self.effects_log.append('pad')
return self
|
def function[pad, parameter[self, start_duration, end_duration]]:
constant[Add silence to the beginning or end of a file.
Calling this with the default arguments has no effect.
Parameters
----------
start_duration : float
Number of seconds of silence to add to beginning.
end_duration : float
Number of seconds of silence to add to end.
See Also
--------
delay
]
if <ast.BoolOp object at 0x7da1b007d750> begin[:]
<ast.Raise object at 0x7da1b007f910>
if <ast.BoolOp object at 0x7da1b007c3d0> begin[:]
<ast.Raise object at 0x7da1b007c8e0>
variable[effect_args] assign[=] list[[<ast.Constant object at 0x7da1b007e170>, <ast.Call object at 0x7da1b007fa30>, <ast.Call object at 0x7da1b007e1d0>]]
call[name[self].effects.extend, parameter[name[effect_args]]]
call[name[self].effects_log.append, parameter[constant[pad]]]
return[name[self]]
|
keyword[def] identifier[pad] ( identifier[self] , identifier[start_duration] = literal[int] , identifier[end_duration] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[is_number] ( identifier[start_duration] ) keyword[or] identifier[start_duration] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[is_number] ( identifier[end_duration] ) keyword[or] identifier[end_duration] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[effect_args] =[
literal[string] ,
literal[string] . identifier[format] ( identifier[start_duration] ),
literal[string] . identifier[format] ( identifier[end_duration] )
]
identifier[self] . identifier[effects] . identifier[extend] ( identifier[effect_args] )
identifier[self] . identifier[effects_log] . identifier[append] ( literal[string] )
keyword[return] identifier[self]
|
def pad(self, start_duration=0.0, end_duration=0.0):
"""Add silence to the beginning or end of a file.
Calling this with the default arguments has no effect.
Parameters
----------
start_duration : float
Number of seconds of silence to add to beginning.
end_duration : float
Number of seconds of silence to add to end.
See Also
--------
delay
"""
if not is_number(start_duration) or start_duration < 0:
raise ValueError('Start duration must be a positive number.') # depends on [control=['if'], data=[]]
if not is_number(end_duration) or end_duration < 0:
raise ValueError('End duration must be positive.') # depends on [control=['if'], data=[]]
effect_args = ['pad', '{:f}'.format(start_duration), '{:f}'.format(end_duration)]
self.effects.extend(effect_args)
self.effects_log.append('pad')
return self
|
def render_long_description(self, dirname):
"""Convert a package's long description to HTML.
"""
with changedir(dirname):
self.setuptools.check_valid_package()
long_description = self.setuptools.get_long_description()
outfile = abspath('.long-description.html')
self.docutils.publish_string(long_description, outfile, self.styles)
return outfile
|
def function[render_long_description, parameter[self, dirname]]:
constant[Convert a package's long description to HTML.
]
with call[name[changedir], parameter[name[dirname]]] begin[:]
call[name[self].setuptools.check_valid_package, parameter[]]
variable[long_description] assign[=] call[name[self].setuptools.get_long_description, parameter[]]
variable[outfile] assign[=] call[name[abspath], parameter[constant[.long-description.html]]]
call[name[self].docutils.publish_string, parameter[name[long_description], name[outfile], name[self].styles]]
return[name[outfile]]
|
keyword[def] identifier[render_long_description] ( identifier[self] , identifier[dirname] ):
literal[string]
keyword[with] identifier[changedir] ( identifier[dirname] ):
identifier[self] . identifier[setuptools] . identifier[check_valid_package] ()
identifier[long_description] = identifier[self] . identifier[setuptools] . identifier[get_long_description] ()
identifier[outfile] = identifier[abspath] ( literal[string] )
identifier[self] . identifier[docutils] . identifier[publish_string] ( identifier[long_description] , identifier[outfile] , identifier[self] . identifier[styles] )
keyword[return] identifier[outfile]
|
def render_long_description(self, dirname):
"""Convert a package's long description to HTML.
"""
with changedir(dirname):
self.setuptools.check_valid_package()
long_description = self.setuptools.get_long_description()
outfile = abspath('.long-description.html')
self.docutils.publish_string(long_description, outfile, self.styles)
return outfile # depends on [control=['with'], data=[]]
|
def encrypt_encoded(self, encoding, r_value):
"""Paillier encrypt an encoded value.
Args:
encoding: The EncodedNumber instance.
r_value (int): obfuscator for the ciphertext; by default (i.e.
if *r_value* is None), a random value is used.
Returns:
EncryptedNumber: An encryption of *value*.
"""
# If r_value is None, obfuscate in a call to .obfuscate() (below)
obfuscator = r_value or 1
ciphertext = self.raw_encrypt(encoding.encoding, r_value=obfuscator)
encrypted_number = EncryptedNumber(self, ciphertext, encoding.exponent)
if r_value is None:
encrypted_number.obfuscate()
return encrypted_number
|
def function[encrypt_encoded, parameter[self, encoding, r_value]]:
constant[Paillier encrypt an encoded value.
Args:
encoding: The EncodedNumber instance.
r_value (int): obfuscator for the ciphertext; by default (i.e.
if *r_value* is None), a random value is used.
Returns:
EncryptedNumber: An encryption of *value*.
]
variable[obfuscator] assign[=] <ast.BoolOp object at 0x7da18bcc8e80>
variable[ciphertext] assign[=] call[name[self].raw_encrypt, parameter[name[encoding].encoding]]
variable[encrypted_number] assign[=] call[name[EncryptedNumber], parameter[name[self], name[ciphertext], name[encoding].exponent]]
if compare[name[r_value] is constant[None]] begin[:]
call[name[encrypted_number].obfuscate, parameter[]]
return[name[encrypted_number]]
|
keyword[def] identifier[encrypt_encoded] ( identifier[self] , identifier[encoding] , identifier[r_value] ):
literal[string]
identifier[obfuscator] = identifier[r_value] keyword[or] literal[int]
identifier[ciphertext] = identifier[self] . identifier[raw_encrypt] ( identifier[encoding] . identifier[encoding] , identifier[r_value] = identifier[obfuscator] )
identifier[encrypted_number] = identifier[EncryptedNumber] ( identifier[self] , identifier[ciphertext] , identifier[encoding] . identifier[exponent] )
keyword[if] identifier[r_value] keyword[is] keyword[None] :
identifier[encrypted_number] . identifier[obfuscate] ()
keyword[return] identifier[encrypted_number]
|
def encrypt_encoded(self, encoding, r_value):
"""Paillier encrypt an encoded value.
Args:
encoding: The EncodedNumber instance.
r_value (int): obfuscator for the ciphertext; by default (i.e.
if *r_value* is None), a random value is used.
Returns:
EncryptedNumber: An encryption of *value*.
"""
# If r_value is None, obfuscate in a call to .obfuscate() (below)
obfuscator = r_value or 1
ciphertext = self.raw_encrypt(encoding.encoding, r_value=obfuscator)
encrypted_number = EncryptedNumber(self, ciphertext, encoding.exponent)
if r_value is None:
encrypted_number.obfuscate() # depends on [control=['if'], data=[]]
return encrypted_number
|
def merge_dict(lhs, rhs):
""" Merge content of a dict in another
:param: dict: lhs
dict where is merged the second one
:param: dict: rhs
dict whose content is merged
"""
assert isinstance(lhs, dict)
assert isinstance(rhs, dict)
for k, v in rhs.iteritems():
if k not in lhs:
lhs[k] = v
else:
lhs[k] = merge_dict(lhs[k], v)
return lhs
|
def function[merge_dict, parameter[lhs, rhs]]:
constant[ Merge content of a dict in another
:param: dict: lhs
dict where is merged the second one
:param: dict: rhs
dict whose content is merged
]
assert[call[name[isinstance], parameter[name[lhs], name[dict]]]]
assert[call[name[isinstance], parameter[name[rhs], name[dict]]]]
for taget[tuple[[<ast.Name object at 0x7da204620280>, <ast.Name object at 0x7da204621b10>]]] in starred[call[name[rhs].iteritems, parameter[]]] begin[:]
if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[lhs]] begin[:]
call[name[lhs]][name[k]] assign[=] name[v]
return[name[lhs]]
|
keyword[def] identifier[merge_dict] ( identifier[lhs] , identifier[rhs] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[lhs] , identifier[dict] )
keyword[assert] identifier[isinstance] ( identifier[rhs] , identifier[dict] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[rhs] . identifier[iteritems] ():
keyword[if] identifier[k] keyword[not] keyword[in] identifier[lhs] :
identifier[lhs] [ identifier[k] ]= identifier[v]
keyword[else] :
identifier[lhs] [ identifier[k] ]= identifier[merge_dict] ( identifier[lhs] [ identifier[k] ], identifier[v] )
keyword[return] identifier[lhs]
|
def merge_dict(lhs, rhs):
""" Merge content of a dict in another
:param: dict: lhs
dict where is merged the second one
:param: dict: rhs
dict whose content is merged
"""
assert isinstance(lhs, dict)
assert isinstance(rhs, dict)
for (k, v) in rhs.iteritems():
if k not in lhs:
lhs[k] = v # depends on [control=['if'], data=['k', 'lhs']]
else:
lhs[k] = merge_dict(lhs[k], v) # depends on [control=['for'], data=[]]
return lhs
|
def calc_circuit_breaker_position(self, debug=False):
""" Calculates the optimal position of a circuit breaker on route.
Parameters
----------
debug: bool, defaults to False
If True, prints process information.
Returns
-------
int
position of circuit breaker on route (index of last node on 1st half-ring preceding the circuit breaker)
Notes
-----
According to planning principles of MV grids, a MV ring is run as two strings (half-rings) separated by a
circuit breaker which is open at normal operation.
Assuming a ring (route which is connected to the root node at either sides), the optimal position of a circuit
breaker is defined as the position (virtual cable) between two nodes where the conveyed current is minimal on
the route. Instead of the peak current, the peak load is used here (assuming a constant voltage).
The circuit breakers are used here for checking tech. constraints only and will be re-located after connection
of satellites and stations in ding0.grid.mv_grid.tools.set_circuit_breakers
References
----------
See Also
--------
ding0.grid.mv_grid.tools.set_circuit_breakers
"""
# TODO: add references (Tao)
# set init value
demand_diff_min = 10e6
# check possible positions in route
for ctr in range(len(self._nodes)):
# split route and calc demand difference
route_demand_part1 = sum([node.demand() for node in self._nodes[0:ctr]])
route_demand_part2 = sum([node.demand() for node in self._nodes[ctr:len(self._nodes)]])
demand_diff = abs(route_demand_part1 - route_demand_part2)
if demand_diff < demand_diff_min:
demand_diff_min = demand_diff
position = ctr
if debug:
logger.debug('sum 1={}'.format(
sum([node.demand() for node in self._nodes[0:position]])))
logger.debug('sum 2={}'.format(sum([node.demand() for node in
self._nodes[
position:len(self._nodes)]])))
logger.debug(
'Position of circuit breaker: {0}-{1} (sumdiff={2})'.format(
self._nodes[position - 1], self._nodes[position],
demand_diff_min))
return position
|
def function[calc_circuit_breaker_position, parameter[self, debug]]:
constant[ Calculates the optimal position of a circuit breaker on route.
Parameters
----------
debug: bool, defaults to False
If True, prints process information.
Returns
-------
int
position of circuit breaker on route (index of last node on 1st half-ring preceding the circuit breaker)
Notes
-----
According to planning principles of MV grids, a MV ring is run as two strings (half-rings) separated by a
circuit breaker which is open at normal operation.
Assuming a ring (route which is connected to the root node at either sides), the optimal position of a circuit
breaker is defined as the position (virtual cable) between two nodes where the conveyed current is minimal on
the route. Instead of the peak current, the peak load is used here (assuming a constant voltage).
The circuit breakers are used here for checking tech. constraints only and will be re-located after connection
of satellites and stations in ding0.grid.mv_grid.tools.set_circuit_breakers
References
----------
See Also
--------
ding0.grid.mv_grid.tools.set_circuit_breakers
]
variable[demand_diff_min] assign[=] constant[10000000.0]
for taget[name[ctr]] in starred[call[name[range], parameter[call[name[len], parameter[name[self]._nodes]]]]] begin[:]
variable[route_demand_part1] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da20c6a8340>]]
variable[route_demand_part2] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da20c6abcd0>]]
variable[demand_diff] assign[=] call[name[abs], parameter[binary_operation[name[route_demand_part1] - name[route_demand_part2]]]]
if compare[name[demand_diff] less[<] name[demand_diff_min]] begin[:]
variable[demand_diff_min] assign[=] name[demand_diff]
variable[position] assign[=] name[ctr]
if name[debug] begin[:]
call[name[logger].debug, parameter[call[constant[sum 1={}].format, parameter[call[name[sum], parameter[<ast.ListComp object at 0x7da20c6a8910>]]]]]]
call[name[logger].debug, parameter[call[constant[sum 2={}].format, parameter[call[name[sum], parameter[<ast.ListComp object at 0x7da20c6aa200>]]]]]]
call[name[logger].debug, parameter[call[constant[Position of circuit breaker: {0}-{1} (sumdiff={2})].format, parameter[call[name[self]._nodes][binary_operation[name[position] - constant[1]]], call[name[self]._nodes][name[position]], name[demand_diff_min]]]]]
return[name[position]]
|
keyword[def] identifier[calc_circuit_breaker_position] ( identifier[self] , identifier[debug] = keyword[False] ):
literal[string]
identifier[demand_diff_min] = literal[int]
keyword[for] identifier[ctr] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[_nodes] )):
identifier[route_demand_part1] = identifier[sum] ([ identifier[node] . identifier[demand] () keyword[for] identifier[node] keyword[in] identifier[self] . identifier[_nodes] [ literal[int] : identifier[ctr] ]])
identifier[route_demand_part2] = identifier[sum] ([ identifier[node] . identifier[demand] () keyword[for] identifier[node] keyword[in] identifier[self] . identifier[_nodes] [ identifier[ctr] : identifier[len] ( identifier[self] . identifier[_nodes] )]])
identifier[demand_diff] = identifier[abs] ( identifier[route_demand_part1] - identifier[route_demand_part2] )
keyword[if] identifier[demand_diff] < identifier[demand_diff_min] :
identifier[demand_diff_min] = identifier[demand_diff]
identifier[position] = identifier[ctr]
keyword[if] identifier[debug] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[sum] ([ identifier[node] . identifier[demand] () keyword[for] identifier[node] keyword[in] identifier[self] . identifier[_nodes] [ literal[int] : identifier[position] ]])))
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[sum] ([ identifier[node] . identifier[demand] () keyword[for] identifier[node] keyword[in]
identifier[self] . identifier[_nodes] [
identifier[position] : identifier[len] ( identifier[self] . identifier[_nodes] )]])))
identifier[logger] . identifier[debug] (
literal[string] . identifier[format] (
identifier[self] . identifier[_nodes] [ identifier[position] - literal[int] ], identifier[self] . identifier[_nodes] [ identifier[position] ],
identifier[demand_diff_min] ))
keyword[return] identifier[position]
|
def calc_circuit_breaker_position(self, debug=False):
""" Calculates the optimal position of a circuit breaker on route.
Parameters
----------
debug: bool, defaults to False
If True, prints process information.
Returns
-------
int
position of circuit breaker on route (index of last node on 1st half-ring preceding the circuit breaker)
Notes
-----
According to planning principles of MV grids, a MV ring is run as two strings (half-rings) separated by a
circuit breaker which is open at normal operation.
Assuming a ring (route which is connected to the root node at either sides), the optimal position of a circuit
breaker is defined as the position (virtual cable) between two nodes where the conveyed current is minimal on
the route. Instead of the peak current, the peak load is used here (assuming a constant voltage).
The circuit breakers are used here for checking tech. constraints only and will be re-located after connection
of satellites and stations in ding0.grid.mv_grid.tools.set_circuit_breakers
References
----------
See Also
--------
ding0.grid.mv_grid.tools.set_circuit_breakers
"""
# TODO: add references (Tao)
# set init value
demand_diff_min = 10000000.0
# check possible positions in route
for ctr in range(len(self._nodes)):
# split route and calc demand difference
route_demand_part1 = sum([node.demand() for node in self._nodes[0:ctr]])
route_demand_part2 = sum([node.demand() for node in self._nodes[ctr:len(self._nodes)]])
demand_diff = abs(route_demand_part1 - route_demand_part2)
if demand_diff < demand_diff_min:
demand_diff_min = demand_diff
position = ctr # depends on [control=['if'], data=['demand_diff', 'demand_diff_min']] # depends on [control=['for'], data=['ctr']]
if debug:
logger.debug('sum 1={}'.format(sum([node.demand() for node in self._nodes[0:position]])))
logger.debug('sum 2={}'.format(sum([node.demand() for node in self._nodes[position:len(self._nodes)]])))
logger.debug('Position of circuit breaker: {0}-{1} (sumdiff={2})'.format(self._nodes[position - 1], self._nodes[position], demand_diff_min)) # depends on [control=['if'], data=[]]
return position
|
def to_dict(self, lev=0):
"""
Return a dictionary representation of the class
:return: A dict
"""
_spec = self.c_param
_res = {}
lev += 1
for key, val in self._dict.items():
try:
(_, req, _ser, _, null_allowed) = _spec[str(key)]
except KeyError:
try:
_key, lang = key.split("#")
(_, req, _ser, _, null_allowed) = _spec[_key]
except (ValueError, KeyError):
try:
(_, req, _ser, _, null_allowed) = _spec['*']
except KeyError:
_ser = None
if _ser:
val = _ser(val, "dict", lev)
if isinstance(val, Message):
_res[key] = val.to_dict(lev + 1)
elif isinstance(val, list) and isinstance(
next(iter(val or []), None), Message):
_res[key] = [v.to_dict(lev) for v in val]
else:
_res[key] = val
return _res
|
def function[to_dict, parameter[self, lev]]:
constant[
Return a dictionary representation of the class
:return: A dict
]
variable[_spec] assign[=] name[self].c_param
variable[_res] assign[=] dictionary[[], []]
<ast.AugAssign object at 0x7da18f722b00>
for taget[tuple[[<ast.Name object at 0x7da18f7229b0>, <ast.Name object at 0x7da18f721de0>]]] in starred[call[name[self]._dict.items, parameter[]]] begin[:]
<ast.Try object at 0x7da18f722410>
if name[_ser] begin[:]
variable[val] assign[=] call[name[_ser], parameter[name[val], constant[dict], name[lev]]]
if call[name[isinstance], parameter[name[val], name[Message]]] begin[:]
call[name[_res]][name[key]] assign[=] call[name[val].to_dict, parameter[binary_operation[name[lev] + constant[1]]]]
return[name[_res]]
|
keyword[def] identifier[to_dict] ( identifier[self] , identifier[lev] = literal[int] ):
literal[string]
identifier[_spec] = identifier[self] . identifier[c_param]
identifier[_res] ={}
identifier[lev] += literal[int]
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[self] . identifier[_dict] . identifier[items] ():
keyword[try] :
( identifier[_] , identifier[req] , identifier[_ser] , identifier[_] , identifier[null_allowed] )= identifier[_spec] [ identifier[str] ( identifier[key] )]
keyword[except] identifier[KeyError] :
keyword[try] :
identifier[_key] , identifier[lang] = identifier[key] . identifier[split] ( literal[string] )
( identifier[_] , identifier[req] , identifier[_ser] , identifier[_] , identifier[null_allowed] )= identifier[_spec] [ identifier[_key] ]
keyword[except] ( identifier[ValueError] , identifier[KeyError] ):
keyword[try] :
( identifier[_] , identifier[req] , identifier[_ser] , identifier[_] , identifier[null_allowed] )= identifier[_spec] [ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[_ser] = keyword[None]
keyword[if] identifier[_ser] :
identifier[val] = identifier[_ser] ( identifier[val] , literal[string] , identifier[lev] )
keyword[if] identifier[isinstance] ( identifier[val] , identifier[Message] ):
identifier[_res] [ identifier[key] ]= identifier[val] . identifier[to_dict] ( identifier[lev] + literal[int] )
keyword[elif] identifier[isinstance] ( identifier[val] , identifier[list] ) keyword[and] identifier[isinstance] (
identifier[next] ( identifier[iter] ( identifier[val] keyword[or] []), keyword[None] ), identifier[Message] ):
identifier[_res] [ identifier[key] ]=[ identifier[v] . identifier[to_dict] ( identifier[lev] ) keyword[for] identifier[v] keyword[in] identifier[val] ]
keyword[else] :
identifier[_res] [ identifier[key] ]= identifier[val]
keyword[return] identifier[_res]
|
def to_dict(self, lev=0):
"""
Return a dictionary representation of the class
:return: A dict
"""
_spec = self.c_param
_res = {}
lev += 1
for (key, val) in self._dict.items():
try:
(_, req, _ser, _, null_allowed) = _spec[str(key)] # depends on [control=['try'], data=[]]
except KeyError:
try:
(_key, lang) = key.split('#')
(_, req, _ser, _, null_allowed) = _spec[_key] # depends on [control=['try'], data=[]]
except (ValueError, KeyError):
try:
(_, req, _ser, _, null_allowed) = _spec['*'] # depends on [control=['try'], data=[]]
except KeyError:
_ser = None # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
if _ser:
val = _ser(val, 'dict', lev) # depends on [control=['if'], data=[]]
if isinstance(val, Message):
_res[key] = val.to_dict(lev + 1) # depends on [control=['if'], data=[]]
elif isinstance(val, list) and isinstance(next(iter(val or []), None), Message):
_res[key] = [v.to_dict(lev) for v in val] # depends on [control=['if'], data=[]]
else:
_res[key] = val # depends on [control=['for'], data=[]]
return _res
|
def undo_last_save(self, logMessage=None):
"""Undo the last change made to the datastream content and profile, effectively
reverting to the object state in Fedora as of the specified timestamp.
For a versioned datastream, this will purge the most recent datastream.
For an unversioned datastream, this will overwrite the last changes with
a cached version of any content and/or info pulled from Fedora.
"""
# NOTE: currently not clearing any of the object caches and backups
# of fedora content and datastream info, as it is unclear what (if anything)
# should be cleared
if self.versionable:
# if this is a versioned datastream, get datastream history
# and purge the most recent version
last_save = self.history().versions[0].created # fedora returns most recent first
r = self.obj.api.purgeDatastream(self.obj.pid, self.id,
datetime_to_fedoratime(last_save),
logMessage=logMessage)
return r.status_code == requests.codes.ok
else:
# for an unversioned datastream, update with any content and info
# backups that were pulled from Fedora before any modifications were made
args = {}
if self._content_backup is not None:
args['content'] = self._content_backup
if self._info_backup is not None:
args.update(self._info_backup)
r = self.obj.api.modifyDatastream(self.obj.pid, self.id,
logMessage=logMessage, **args)
return r.status_code == requests.codes.ok
|
def function[undo_last_save, parameter[self, logMessage]]:
constant[Undo the last change made to the datastream content and profile, effectively
reverting to the object state in Fedora as of the specified timestamp.
For a versioned datastream, this will purge the most recent datastream.
For an unversioned datastream, this will overwrite the last changes with
a cached version of any content and/or info pulled from Fedora.
]
if name[self].versionable begin[:]
variable[last_save] assign[=] call[call[name[self].history, parameter[]].versions][constant[0]].created
variable[r] assign[=] call[name[self].obj.api.purgeDatastream, parameter[name[self].obj.pid, name[self].id, call[name[datetime_to_fedoratime], parameter[name[last_save]]]]]
return[compare[name[r].status_code equal[==] name[requests].codes.ok]]
|
keyword[def] identifier[undo_last_save] ( identifier[self] , identifier[logMessage] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[versionable] :
identifier[last_save] = identifier[self] . identifier[history] (). identifier[versions] [ literal[int] ]. identifier[created]
identifier[r] = identifier[self] . identifier[obj] . identifier[api] . identifier[purgeDatastream] ( identifier[self] . identifier[obj] . identifier[pid] , identifier[self] . identifier[id] ,
identifier[datetime_to_fedoratime] ( identifier[last_save] ),
identifier[logMessage] = identifier[logMessage] )
keyword[return] identifier[r] . identifier[status_code] == identifier[requests] . identifier[codes] . identifier[ok]
keyword[else] :
identifier[args] ={}
keyword[if] identifier[self] . identifier[_content_backup] keyword[is] keyword[not] keyword[None] :
identifier[args] [ literal[string] ]= identifier[self] . identifier[_content_backup]
keyword[if] identifier[self] . identifier[_info_backup] keyword[is] keyword[not] keyword[None] :
identifier[args] . identifier[update] ( identifier[self] . identifier[_info_backup] )
identifier[r] = identifier[self] . identifier[obj] . identifier[api] . identifier[modifyDatastream] ( identifier[self] . identifier[obj] . identifier[pid] , identifier[self] . identifier[id] ,
identifier[logMessage] = identifier[logMessage] ,** identifier[args] )
keyword[return] identifier[r] . identifier[status_code] == identifier[requests] . identifier[codes] . identifier[ok]
|
def undo_last_save(self, logMessage=None):
"""Undo the last change made to the datastream content and profile, effectively
reverting to the object state in Fedora as of the specified timestamp.
For a versioned datastream, this will purge the most recent datastream.
For an unversioned datastream, this will overwrite the last changes with
a cached version of any content and/or info pulled from Fedora.
"""
# NOTE: currently not clearing any of the object caches and backups
# of fedora content and datastream info, as it is unclear what (if anything)
# should be cleared
if self.versionable:
# if this is a versioned datastream, get datastream history
# and purge the most recent version
last_save = self.history().versions[0].created # fedora returns most recent first
r = self.obj.api.purgeDatastream(self.obj.pid, self.id, datetime_to_fedoratime(last_save), logMessage=logMessage)
return r.status_code == requests.codes.ok # depends on [control=['if'], data=[]]
else:
# for an unversioned datastream, update with any content and info
# backups that were pulled from Fedora before any modifications were made
args = {}
if self._content_backup is not None:
args['content'] = self._content_backup # depends on [control=['if'], data=[]]
if self._info_backup is not None:
args.update(self._info_backup) # depends on [control=['if'], data=[]]
r = self.obj.api.modifyDatastream(self.obj.pid, self.id, logMessage=logMessage, **args)
return r.status_code == requests.codes.ok
|
def create(self, friendly_name=values.unset, unique_name=values.unset,
attributes=values.unset, type=values.unset):
"""
Create a new ChannelInstance
:param unicode friendly_name: A human-readable name for the Channel.
:param unicode unique_name: A unique, addressable name for the Channel.
:param unicode attributes: An optional metadata field you can use to store any data you wish.
:param ChannelInstance.ChannelType type: The visibility of the channel - public or private.
:returns: Newly created ChannelInstance
:rtype: twilio.rest.chat.v1.service.channel.ChannelInstance
"""
data = values.of({
'FriendlyName': friendly_name,
'UniqueName': unique_name,
'Attributes': attributes,
'Type': type,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return ChannelInstance(self._version, payload, service_sid=self._solution['service_sid'], )
|
def function[create, parameter[self, friendly_name, unique_name, attributes, type]]:
constant[
Create a new ChannelInstance
:param unicode friendly_name: A human-readable name for the Channel.
:param unicode unique_name: A unique, addressable name for the Channel.
:param unicode attributes: An optional metadata field you can use to store any data you wish.
:param ChannelInstance.ChannelType type: The visibility of the channel - public or private.
:returns: Newly created ChannelInstance
:rtype: twilio.rest.chat.v1.service.channel.ChannelInstance
]
variable[data] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da1b1eaea40>, <ast.Constant object at 0x7da1b1eadb40>, <ast.Constant object at 0x7da1b1eae560>, <ast.Constant object at 0x7da1b1eae9b0>], [<ast.Name object at 0x7da1b1eae290>, <ast.Name object at 0x7da1b1eafac0>, <ast.Name object at 0x7da1b1eaee60>, <ast.Name object at 0x7da1b1eacd60>]]]]
variable[payload] assign[=] call[name[self]._version.create, parameter[constant[POST], name[self]._uri]]
return[call[name[ChannelInstance], parameter[name[self]._version, name[payload]]]]
|
keyword[def] identifier[create] ( identifier[self] , identifier[friendly_name] = identifier[values] . identifier[unset] , identifier[unique_name] = identifier[values] . identifier[unset] ,
identifier[attributes] = identifier[values] . identifier[unset] , identifier[type] = identifier[values] . identifier[unset] ):
literal[string]
identifier[data] = identifier[values] . identifier[of] ({
literal[string] : identifier[friendly_name] ,
literal[string] : identifier[unique_name] ,
literal[string] : identifier[attributes] ,
literal[string] : identifier[type] ,
})
identifier[payload] = identifier[self] . identifier[_version] . identifier[create] (
literal[string] ,
identifier[self] . identifier[_uri] ,
identifier[data] = identifier[data] ,
)
keyword[return] identifier[ChannelInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ],)
|
def create(self, friendly_name=values.unset, unique_name=values.unset, attributes=values.unset, type=values.unset):
"""
Create a new ChannelInstance
:param unicode friendly_name: A human-readable name for the Channel.
:param unicode unique_name: A unique, addressable name for the Channel.
:param unicode attributes: An optional metadata field you can use to store any data you wish.
:param ChannelInstance.ChannelType type: The visibility of the channel - public or private.
:returns: Newly created ChannelInstance
:rtype: twilio.rest.chat.v1.service.channel.ChannelInstance
"""
data = values.of({'FriendlyName': friendly_name, 'UniqueName': unique_name, 'Attributes': attributes, 'Type': type})
payload = self._version.create('POST', self._uri, data=data)
return ChannelInstance(self._version, payload, service_sid=self._solution['service_sid'])
|
def get_dataframe(self, force_computation=False):
"""
Preprocesses then transforms the return of fetch().
Args:
force_computation (bool, optional) : Defaults to False. If set to True, forces the computation of DataFrame at each call.
Returns:
pandas.DataFrame: Preprocessed and transformed DataFrame.
"""
# returns df if it was already computed
if self.df is not None and not force_computation: return self.df
self.df = self.fetch(self.context)
# compute df = transform(preprocess(df)
self.df = self.preprocess(self.df)
self.transform(self.df)
return self.df
|
def function[get_dataframe, parameter[self, force_computation]]:
constant[
Preprocesses then transforms the return of fetch().
Args:
force_computation (bool, optional) : Defaults to False. If set to True, forces the computation of DataFrame at each call.
Returns:
pandas.DataFrame: Preprocessed and transformed DataFrame.
]
if <ast.BoolOp object at 0x7da18f7224d0> begin[:]
return[name[self].df]
name[self].df assign[=] call[name[self].fetch, parameter[name[self].context]]
name[self].df assign[=] call[name[self].preprocess, parameter[name[self].df]]
call[name[self].transform, parameter[name[self].df]]
return[name[self].df]
|
keyword[def] identifier[get_dataframe] ( identifier[self] , identifier[force_computation] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[df] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[force_computation] : keyword[return] identifier[self] . identifier[df]
identifier[self] . identifier[df] = identifier[self] . identifier[fetch] ( identifier[self] . identifier[context] )
identifier[self] . identifier[df] = identifier[self] . identifier[preprocess] ( identifier[self] . identifier[df] )
identifier[self] . identifier[transform] ( identifier[self] . identifier[df] )
keyword[return] identifier[self] . identifier[df]
|
def get_dataframe(self, force_computation=False):
"""
Preprocesses then transforms the return of fetch().
Args:
force_computation (bool, optional) : Defaults to False. If set to True, forces the computation of DataFrame at each call.
Returns:
pandas.DataFrame: Preprocessed and transformed DataFrame.
"""
# returns df if it was already computed
if self.df is not None and (not force_computation):
return self.df # depends on [control=['if'], data=[]]
self.df = self.fetch(self.context)
# compute df = transform(preprocess(df)
self.df = self.preprocess(self.df)
self.transform(self.df)
return self.df
|
def verify(signature: Signature, message: bytes, ver_key: VerKey, gen: Generator) -> bool:
"""
Verifies the message signature and returns true - if signature valid or false otherwise.
:param: signature - Signature to verify
:param: message - Message to verify
:param: ver_key - Verification key
:param: gen - Generator point
:return: true if signature valid
"""
logger = logging.getLogger(__name__)
logger.debug("Bls::verify: >>> signature: %r, message: %r, ver_key: %r, gen: %r", signature, message, ver_key,
gen)
valid = c_bool()
do_call('indy_crypto_bsl_verify',
signature.c_instance,
message, len(message),
ver_key.c_instance,
gen.c_instance,
byref(valid))
res = valid
logger.debug("Bls::verify: <<< res: %r", res)
return res
|
def function[verify, parameter[signature, message, ver_key, gen]]:
constant[
Verifies the message signature and returns true - if signature valid or false otherwise.
:param: signature - Signature to verify
:param: message - Message to verify
:param: ver_key - Verification key
:param: gen - Generator point
:return: true if signature valid
]
variable[logger] assign[=] call[name[logging].getLogger, parameter[name[__name__]]]
call[name[logger].debug, parameter[constant[Bls::verify: >>> signature: %r, message: %r, ver_key: %r, gen: %r], name[signature], name[message], name[ver_key], name[gen]]]
variable[valid] assign[=] call[name[c_bool], parameter[]]
call[name[do_call], parameter[constant[indy_crypto_bsl_verify], name[signature].c_instance, name[message], call[name[len], parameter[name[message]]], name[ver_key].c_instance, name[gen].c_instance, call[name[byref], parameter[name[valid]]]]]
variable[res] assign[=] name[valid]
call[name[logger].debug, parameter[constant[Bls::verify: <<< res: %r], name[res]]]
return[name[res]]
|
keyword[def] identifier[verify] ( identifier[signature] : identifier[Signature] , identifier[message] : identifier[bytes] , identifier[ver_key] : identifier[VerKey] , identifier[gen] : identifier[Generator] )-> identifier[bool] :
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[signature] , identifier[message] , identifier[ver_key] ,
identifier[gen] )
identifier[valid] = identifier[c_bool] ()
identifier[do_call] ( literal[string] ,
identifier[signature] . identifier[c_instance] ,
identifier[message] , identifier[len] ( identifier[message] ),
identifier[ver_key] . identifier[c_instance] ,
identifier[gen] . identifier[c_instance] ,
identifier[byref] ( identifier[valid] ))
identifier[res] = identifier[valid]
identifier[logger] . identifier[debug] ( literal[string] , identifier[res] )
keyword[return] identifier[res]
|
def verify(signature: Signature, message: bytes, ver_key: VerKey, gen: Generator) -> bool:
"""
Verifies the message signature and returns true - if signature valid or false otherwise.
:param: signature - Signature to verify
:param: message - Message to verify
:param: ver_key - Verification key
:param: gen - Generator point
:return: true if signature valid
"""
logger = logging.getLogger(__name__)
logger.debug('Bls::verify: >>> signature: %r, message: %r, ver_key: %r, gen: %r', signature, message, ver_key, gen)
valid = c_bool()
do_call('indy_crypto_bsl_verify', signature.c_instance, message, len(message), ver_key.c_instance, gen.c_instance, byref(valid))
res = valid
logger.debug('Bls::verify: <<< res: %r', res)
return res
|
def concat(self, *dss, **kwargs):
"""
Concatenate dataswim instances from and
set it to the main dataframe
:param dss: dataswim instances to concatenate
:type dss: Ds
:param kwargs: keyword arguments for ``pd.concat``
"""
try:
df = pd.DataFrame()
for dsx in dss:
df = pd.concat([df, dsx.df], **kwargs)
self.df = df
except Exception as e:
self.err(e, "Can not concatenate data")
|
def function[concat, parameter[self]]:
constant[
Concatenate dataswim instances from and
set it to the main dataframe
:param dss: dataswim instances to concatenate
:type dss: Ds
:param kwargs: keyword arguments for ``pd.concat``
]
<ast.Try object at 0x7da18f812920>
|
keyword[def] identifier[concat] ( identifier[self] ,* identifier[dss] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[df] = identifier[pd] . identifier[DataFrame] ()
keyword[for] identifier[dsx] keyword[in] identifier[dss] :
identifier[df] = identifier[pd] . identifier[concat] ([ identifier[df] , identifier[dsx] . identifier[df] ],** identifier[kwargs] )
identifier[self] . identifier[df] = identifier[df]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[err] ( identifier[e] , literal[string] )
|
def concat(self, *dss, **kwargs):
"""
Concatenate dataswim instances from and
set it to the main dataframe
:param dss: dataswim instances to concatenate
:type dss: Ds
:param kwargs: keyword arguments for ``pd.concat``
"""
try:
df = pd.DataFrame()
for dsx in dss:
df = pd.concat([df, dsx.df], **kwargs) # depends on [control=['for'], data=['dsx']]
self.df = df # depends on [control=['try'], data=[]]
except Exception as e:
self.err(e, 'Can not concatenate data') # depends on [control=['except'], data=['e']]
|
def render_inner(self, token):
"""
Recursively renders child tokens. Joins the rendered
strings with no space in between.
If newlines / spaces are needed between tokens, add them
in their respective templates, or override this function
in the renderer subclass, so that whitespace won't seem to
appear magically for anyone reading your program.
Arguments:
token: a branch node who has children attribute.
"""
rendered = [self.render(child) for child in token.children]
return ''.join(rendered)
|
def function[render_inner, parameter[self, token]]:
constant[
Recursively renders child tokens. Joins the rendered
strings with no space in between.
If newlines / spaces are needed between tokens, add them
in their respective templates, or override this function
in the renderer subclass, so that whitespace won't seem to
appear magically for anyone reading your program.
Arguments:
token: a branch node who has children attribute.
]
variable[rendered] assign[=] <ast.ListComp object at 0x7da204567280>
return[call[constant[].join, parameter[name[rendered]]]]
|
keyword[def] identifier[render_inner] ( identifier[self] , identifier[token] ):
literal[string]
identifier[rendered] =[ identifier[self] . identifier[render] ( identifier[child] ) keyword[for] identifier[child] keyword[in] identifier[token] . identifier[children] ]
keyword[return] literal[string] . identifier[join] ( identifier[rendered] )
|
def render_inner(self, token):
"""
Recursively renders child tokens. Joins the rendered
strings with no space in between.
If newlines / spaces are needed between tokens, add them
in their respective templates, or override this function
in the renderer subclass, so that whitespace won't seem to
appear magically for anyone reading your program.
Arguments:
token: a branch node who has children attribute.
"""
rendered = [self.render(child) for child in token.children]
return ''.join(rendered)
|
def setcal(self, cal, cal_error, offset, offset_err, data_type):
"""Set the dataset calibration coefficients.
Args::
cal the calibraton factor (attribute 'scale_factor')
cal_error calibration factor error
(attribute 'scale_factor_err')
offset offset value (attribute 'add_offset')
offset_err offset error (attribute 'add_offset_err')
data_type data type of the values resulting from applying the
calibration formula to the dataset values
(one of the SDC.xxx constants)
(attribute 'calibrated_nt')
Returns::
None
See method 'getcal' for the definition of the calibration
formula.
Calibration coefficients are part of the so-called standard
SDS attributes. Calling 'setcal' is equivalent to setting
the following attributes, which correspond to the method
parameters, in order::
scale_factor, scale_factor_err, add_offset, add_offset_err,
calibrated_nt
C library equivalent: SDsetcal
"""
status = _C.SDsetcal(self._id, cal, cal_error,
offset, offset_err, data_type)
_checkErr('setcal', status, 'cannot execute')
|
def function[setcal, parameter[self, cal, cal_error, offset, offset_err, data_type]]:
constant[Set the dataset calibration coefficients.
Args::
cal the calibraton factor (attribute 'scale_factor')
cal_error calibration factor error
(attribute 'scale_factor_err')
offset offset value (attribute 'add_offset')
offset_err offset error (attribute 'add_offset_err')
data_type data type of the values resulting from applying the
calibration formula to the dataset values
(one of the SDC.xxx constants)
(attribute 'calibrated_nt')
Returns::
None
See method 'getcal' for the definition of the calibration
formula.
Calibration coefficients are part of the so-called standard
SDS attributes. Calling 'setcal' is equivalent to setting
the following attributes, which correspond to the method
parameters, in order::
scale_factor, scale_factor_err, add_offset, add_offset_err,
calibrated_nt
C library equivalent: SDsetcal
]
variable[status] assign[=] call[name[_C].SDsetcal, parameter[name[self]._id, name[cal], name[cal_error], name[offset], name[offset_err], name[data_type]]]
call[name[_checkErr], parameter[constant[setcal], name[status], constant[cannot execute]]]
|
keyword[def] identifier[setcal] ( identifier[self] , identifier[cal] , identifier[cal_error] , identifier[offset] , identifier[offset_err] , identifier[data_type] ):
literal[string]
identifier[status] = identifier[_C] . identifier[SDsetcal] ( identifier[self] . identifier[_id] , identifier[cal] , identifier[cal_error] ,
identifier[offset] , identifier[offset_err] , identifier[data_type] )
identifier[_checkErr] ( literal[string] , identifier[status] , literal[string] )
|
def setcal(self, cal, cal_error, offset, offset_err, data_type):
"""Set the dataset calibration coefficients.
Args::
cal the calibraton factor (attribute 'scale_factor')
cal_error calibration factor error
(attribute 'scale_factor_err')
offset offset value (attribute 'add_offset')
offset_err offset error (attribute 'add_offset_err')
data_type data type of the values resulting from applying the
calibration formula to the dataset values
(one of the SDC.xxx constants)
(attribute 'calibrated_nt')
Returns::
None
See method 'getcal' for the definition of the calibration
formula.
Calibration coefficients are part of the so-called standard
SDS attributes. Calling 'setcal' is equivalent to setting
the following attributes, which correspond to the method
parameters, in order::
scale_factor, scale_factor_err, add_offset, add_offset_err,
calibrated_nt
C library equivalent: SDsetcal
"""
status = _C.SDsetcal(self._id, cal, cal_error, offset, offset_err, data_type)
_checkErr('setcal', status, 'cannot execute')
|
def to_serializable(self, use_bytes=False, bytes_type=bytes):
"""Convert a :class:`SampleSet` to a serializable object.
Note that the contents of the :attr:`.SampleSet.info` field are assumed
to be serializable.
Args:
use_bytes (bool, optional, default=False):
If True, a compact representation representing the biases as bytes is used.
bytes_type (class, optional, default=bytes):
This class will be used to wrap the bytes objects in the
serialization if `use_bytes` is true. Useful for when using
Python 2 and using BSON encoding, which will not accept the raw
`bytes` type, so `bson.Binary` can be used instead.
Returns:
dict: Object that can be serialized.
Examples:
This example encodes using JSON.
>>> import dimod
>>> import json
...
>>> samples = dimod.SampleSet.from_samples([-1, 1, -1], dimod.SPIN, energy=-.5)
>>> s = json.dumps(samples.to_serializable())
See also:
:meth:`~.SampleSet.from_serializable`
"""
schema_version = "2.0.0"
record = {name: array2bytes(vector)
for name, vector in self.data_vectors.items()}
record['sample'] = array2bytes(np.packbits(self.record.sample > 0))
if not use_bytes:
for name in record:
record[name] = base64.b64encode(record[name]).decode("UTF-8")
return {"basetype": "SampleSet",
"type": type(self).__name__,
"record": record,
"sample_dtype": str(self.record.sample.dtype), # need this to unpack
"sample_shape": self.record.sample.shape, # need this to unpack
"variable_type": self.vartype.name,
"info": self.info,
"version": {"dimod": __version__,
"sampleset_schema": schema_version},
"variable_labels": list(self.variables),
"use_bytes": bool(use_bytes)}
|
def function[to_serializable, parameter[self, use_bytes, bytes_type]]:
constant[Convert a :class:`SampleSet` to a serializable object.
Note that the contents of the :attr:`.SampleSet.info` field are assumed
to be serializable.
Args:
use_bytes (bool, optional, default=False):
If True, a compact representation representing the biases as bytes is used.
bytes_type (class, optional, default=bytes):
This class will be used to wrap the bytes objects in the
serialization if `use_bytes` is true. Useful for when using
Python 2 and using BSON encoding, which will not accept the raw
`bytes` type, so `bson.Binary` can be used instead.
Returns:
dict: Object that can be serialized.
Examples:
This example encodes using JSON.
>>> import dimod
>>> import json
...
>>> samples = dimod.SampleSet.from_samples([-1, 1, -1], dimod.SPIN, energy=-.5)
>>> s = json.dumps(samples.to_serializable())
See also:
:meth:`~.SampleSet.from_serializable`
]
variable[schema_version] assign[=] constant[2.0.0]
variable[record] assign[=] <ast.DictComp object at 0x7da1b07f4dc0>
call[name[record]][constant[sample]] assign[=] call[name[array2bytes], parameter[call[name[np].packbits, parameter[compare[name[self].record.sample greater[>] constant[0]]]]]]
if <ast.UnaryOp object at 0x7da1b07f5ed0> begin[:]
for taget[name[name]] in starred[name[record]] begin[:]
call[name[record]][name[name]] assign[=] call[call[name[base64].b64encode, parameter[call[name[record]][name[name]]]].decode, parameter[constant[UTF-8]]]
return[dictionary[[<ast.Constant object at 0x7da1b07f5f60>, <ast.Constant object at 0x7da1b07f6620>, <ast.Constant object at 0x7da1b07f6a70>, <ast.Constant object at 0x7da1b07f6ef0>, <ast.Constant object at 0x7da1b07f6fb0>, <ast.Constant object at 0x7da1b07f5e40>, <ast.Constant object at 0x7da1b07f7e50>, <ast.Constant object at 0x7da1b07f4250>, <ast.Constant object at 0x7da1b07f7670>, <ast.Constant object at 0x7da1b07f4a30>], [<ast.Constant object at 0x7da1b07f4ca0>, <ast.Attribute object at 0x7da1b07f6b90>, <ast.Name object at 0x7da1b07f4eb0>, <ast.Call object at 0x7da1b07f6440>, <ast.Attribute object at 0x7da1b07f5cc0>, <ast.Attribute object at 0x7da1b07f7490>, <ast.Attribute object at 0x7da1b07f75b0>, <ast.Dict object at 0x7da1b07f6380>, <ast.Call object at 0x7da1b07f4760>, <ast.Call object at 0x7da1b07f5420>]]]
|
keyword[def] identifier[to_serializable] ( identifier[self] , identifier[use_bytes] = keyword[False] , identifier[bytes_type] = identifier[bytes] ):
literal[string]
identifier[schema_version] = literal[string]
identifier[record] ={ identifier[name] : identifier[array2bytes] ( identifier[vector] )
keyword[for] identifier[name] , identifier[vector] keyword[in] identifier[self] . identifier[data_vectors] . identifier[items] ()}
identifier[record] [ literal[string] ]= identifier[array2bytes] ( identifier[np] . identifier[packbits] ( identifier[self] . identifier[record] . identifier[sample] > literal[int] ))
keyword[if] keyword[not] identifier[use_bytes] :
keyword[for] identifier[name] keyword[in] identifier[record] :
identifier[record] [ identifier[name] ]= identifier[base64] . identifier[b64encode] ( identifier[record] [ identifier[name] ]). identifier[decode] ( literal[string] )
keyword[return] { literal[string] : literal[string] ,
literal[string] : identifier[type] ( identifier[self] ). identifier[__name__] ,
literal[string] : identifier[record] ,
literal[string] : identifier[str] ( identifier[self] . identifier[record] . identifier[sample] . identifier[dtype] ),
literal[string] : identifier[self] . identifier[record] . identifier[sample] . identifier[shape] ,
literal[string] : identifier[self] . identifier[vartype] . identifier[name] ,
literal[string] : identifier[self] . identifier[info] ,
literal[string] :{ literal[string] : identifier[__version__] ,
literal[string] : identifier[schema_version] },
literal[string] : identifier[list] ( identifier[self] . identifier[variables] ),
literal[string] : identifier[bool] ( identifier[use_bytes] )}
|
def to_serializable(self, use_bytes=False, bytes_type=bytes):
"""Convert a :class:`SampleSet` to a serializable object.
Note that the contents of the :attr:`.SampleSet.info` field are assumed
to be serializable.
Args:
use_bytes (bool, optional, default=False):
If True, a compact representation representing the biases as bytes is used.
bytes_type (class, optional, default=bytes):
This class will be used to wrap the bytes objects in the
serialization if `use_bytes` is true. Useful for when using
Python 2 and using BSON encoding, which will not accept the raw
`bytes` type, so `bson.Binary` can be used instead.
Returns:
dict: Object that can be serialized.
Examples:
This example encodes using JSON.
>>> import dimod
>>> import json
...
>>> samples = dimod.SampleSet.from_samples([-1, 1, -1], dimod.SPIN, energy=-.5)
>>> s = json.dumps(samples.to_serializable())
See also:
:meth:`~.SampleSet.from_serializable`
"""
schema_version = '2.0.0'
record = {name: array2bytes(vector) for (name, vector) in self.data_vectors.items()}
record['sample'] = array2bytes(np.packbits(self.record.sample > 0))
if not use_bytes:
for name in record:
record[name] = base64.b64encode(record[name]).decode('UTF-8') # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=[]] # need this to unpack
# need this to unpack
return {'basetype': 'SampleSet', 'type': type(self).__name__, 'record': record, 'sample_dtype': str(self.record.sample.dtype), 'sample_shape': self.record.sample.shape, 'variable_type': self.vartype.name, 'info': self.info, 'version': {'dimod': __version__, 'sampleset_schema': schema_version}, 'variable_labels': list(self.variables), 'use_bytes': bool(use_bytes)}
|
def get_catch_vars(catch):
"""Returns 2-tuple with names of catch control vars, e.g. for "catch $was_exc, $exc"
it returns ('was_exc', 'err').
Args:
catch: the whole catch line
Returns:
2-tuple with names of catch control variables
Raises:
exceptions.YamlSyntaxError if the catch line is malformed
"""
catch_re = re.compile(r'catch\s+(\${?\S+}?),\s*(\${?\S+}?)')
res = catch_re.match(catch)
if res is None:
err = 'Catch must have format "catch $x, $y", got "{0}"'.format(catch)
raise exceptions.YamlSyntaxError(err)
return get_var_name(res.group(1)), get_var_name(res.group(2))
|
def function[get_catch_vars, parameter[catch]]:
constant[Returns 2-tuple with names of catch control vars, e.g. for "catch $was_exc, $exc"
it returns ('was_exc', 'err').
Args:
catch: the whole catch line
Returns:
2-tuple with names of catch control variables
Raises:
exceptions.YamlSyntaxError if the catch line is malformed
]
variable[catch_re] assign[=] call[name[re].compile, parameter[constant[catch\s+(\${?\S+}?),\s*(\${?\S+}?)]]]
variable[res] assign[=] call[name[catch_re].match, parameter[name[catch]]]
if compare[name[res] is constant[None]] begin[:]
variable[err] assign[=] call[constant[Catch must have format "catch $x, $y", got "{0}"].format, parameter[name[catch]]]
<ast.Raise object at 0x7da1b0fadf60>
return[tuple[[<ast.Call object at 0x7da1b0fae320>, <ast.Call object at 0x7da1b0faca30>]]]
|
keyword[def] identifier[get_catch_vars] ( identifier[catch] ):
literal[string]
identifier[catch_re] = identifier[re] . identifier[compile] ( literal[string] )
identifier[res] = identifier[catch_re] . identifier[match] ( identifier[catch] )
keyword[if] identifier[res] keyword[is] keyword[None] :
identifier[err] = literal[string] . identifier[format] ( identifier[catch] )
keyword[raise] identifier[exceptions] . identifier[YamlSyntaxError] ( identifier[err] )
keyword[return] identifier[get_var_name] ( identifier[res] . identifier[group] ( literal[int] )), identifier[get_var_name] ( identifier[res] . identifier[group] ( literal[int] ))
|
def get_catch_vars(catch):
"""Returns 2-tuple with names of catch control vars, e.g. for "catch $was_exc, $exc"
it returns ('was_exc', 'err').
Args:
catch: the whole catch line
Returns:
2-tuple with names of catch control variables
Raises:
exceptions.YamlSyntaxError if the catch line is malformed
"""
catch_re = re.compile('catch\\s+(\\${?\\S+}?),\\s*(\\${?\\S+}?)')
res = catch_re.match(catch)
if res is None:
err = 'Catch must have format "catch $x, $y", got "{0}"'.format(catch)
raise exceptions.YamlSyntaxError(err) # depends on [control=['if'], data=[]]
return (get_var_name(res.group(1)), get_var_name(res.group(2)))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.