code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def send_email(template_name, context=None, *args, **kwargs):
"""
Send a templated email.
To generate the message used for the email, the method first
searches for an HTML template with the given name
(eg: <template>.html), and renders it with the provided context. The
process is repeated for the plain text message except a 'txt'
extension is used. All other options are forwarded to Django's
``send_mail`` function.
Args:
template_name:
The name of the template to use without an extension. The
extensions ``html`` and ``txt`` are appended to the template
name and then rendered to provide the email content.
context:
A dictionary containing the context to render the message
with. Defaults to an empty dictionary.
Returns:
``1`` if the email is succesfully sent and ``0`` otherwise. The
return values come from Django's ``send_mail`` function.
Throws:
NoTemplatesException:
If neither the HTML nor plain text template can be loaded.
"""
context = context or {}
try:
html = render_to_string(
context=context,
template_name='{}.html'.format(template_name),
)
except TemplateDoesNotExist:
html = ''
try:
text = render_to_string(
context=context,
template_name='{}.txt'.format(template_name),
)
except TemplateDoesNotExist:
text = ''
if not html and not text:
raise NoTemplatesException(template_name)
return mail.send_mail(
*args,
html_message=html,
message=text,
**kwargs
) | def function[send_email, parameter[template_name, context]]:
constant[
Send a templated email.
To generate the message used for the email, the method first
searches for an HTML template with the given name
(eg: <template>.html), and renders it with the provided context. The
process is repeated for the plain text message except a 'txt'
extension is used. All other options are forwarded to Django's
``send_mail`` function.
Args:
template_name:
The name of the template to use without an extension. The
extensions ``html`` and ``txt`` are appended to the template
name and then rendered to provide the email content.
context:
A dictionary containing the context to render the message
with. Defaults to an empty dictionary.
Returns:
``1`` if the email is succesfully sent and ``0`` otherwise. The
return values come from Django's ``send_mail`` function.
Throws:
NoTemplatesException:
If neither the HTML nor plain text template can be loaded.
]
variable[context] assign[=] <ast.BoolOp object at 0x7da1b08794b0>
<ast.Try object at 0x7da1b0878d00>
<ast.Try object at 0x7da1b08789d0>
if <ast.BoolOp object at 0x7da1b087a590> begin[:]
<ast.Raise object at 0x7da1b0879de0>
return[call[name[mail].send_mail, parameter[<ast.Starred object at 0x7da1b0852c20>]]] | keyword[def] identifier[send_email] ( identifier[template_name] , identifier[context] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[context] = identifier[context] keyword[or] {}
keyword[try] :
identifier[html] = identifier[render_to_string] (
identifier[context] = identifier[context] ,
identifier[template_name] = literal[string] . identifier[format] ( identifier[template_name] ),
)
keyword[except] identifier[TemplateDoesNotExist] :
identifier[html] = literal[string]
keyword[try] :
identifier[text] = identifier[render_to_string] (
identifier[context] = identifier[context] ,
identifier[template_name] = literal[string] . identifier[format] ( identifier[template_name] ),
)
keyword[except] identifier[TemplateDoesNotExist] :
identifier[text] = literal[string]
keyword[if] keyword[not] identifier[html] keyword[and] keyword[not] identifier[text] :
keyword[raise] identifier[NoTemplatesException] ( identifier[template_name] )
keyword[return] identifier[mail] . identifier[send_mail] (
* identifier[args] ,
identifier[html_message] = identifier[html] ,
identifier[message] = identifier[text] ,
** identifier[kwargs]
) | def send_email(template_name, context=None, *args, **kwargs):
"""
Send a templated email.
To generate the message used for the email, the method first
searches for an HTML template with the given name
(eg: <template>.html), and renders it with the provided context. The
process is repeated for the plain text message except a 'txt'
extension is used. All other options are forwarded to Django's
``send_mail`` function.
Args:
template_name:
The name of the template to use without an extension. The
extensions ``html`` and ``txt`` are appended to the template
name and then rendered to provide the email content.
context:
A dictionary containing the context to render the message
with. Defaults to an empty dictionary.
Returns:
``1`` if the email is succesfully sent and ``0`` otherwise. The
return values come from Django's ``send_mail`` function.
Throws:
NoTemplatesException:
If neither the HTML nor plain text template can be loaded.
"""
context = context or {}
try:
html = render_to_string(context=context, template_name='{}.html'.format(template_name)) # depends on [control=['try'], data=[]]
except TemplateDoesNotExist:
html = '' # depends on [control=['except'], data=[]]
try:
text = render_to_string(context=context, template_name='{}.txt'.format(template_name)) # depends on [control=['try'], data=[]]
except TemplateDoesNotExist:
text = '' # depends on [control=['except'], data=[]]
if not html and (not text):
raise NoTemplatesException(template_name) # depends on [control=['if'], data=[]]
return mail.send_mail(*args, html_message=html, message=text, **kwargs) |
def autoconnect_thread(monitor):
"""Thread which detects USB Serial devices connecting and disconnecting."""
monitor.start()
monitor.filter_by('tty')
epoll = select.epoll()
epoll.register(monitor.fileno(), select.POLLIN)
while True:
try:
events = epoll.poll()
except InterruptedError:
continue
for fileno, _ in events:
if fileno == monitor.fileno():
usb_dev = monitor.poll()
print('autoconnect: {} action: {}'.format(usb_dev.device_node, usb_dev.action))
dev = find_serial_device_by_port(usb_dev.device_node)
if usb_dev.action == 'add':
# Try connecting a few times. Sometimes the serial port
# reports itself as busy, which causes the connection to fail.
for i in range(8):
if dev:
connected = connect_serial(dev.port, dev.baud, dev.wait)
elif is_micropython_usb_device(usb_dev):
connected = connect_serial(usb_dev.device_node)
else:
connected = False
if connected:
break
time.sleep(0.25)
elif usb_dev.action == 'remove':
print('')
print("USB Serial device '%s' disconnected" % usb_dev.device_node)
if dev:
dev.close()
break | def function[autoconnect_thread, parameter[monitor]]:
constant[Thread which detects USB Serial devices connecting and disconnecting.]
call[name[monitor].start, parameter[]]
call[name[monitor].filter_by, parameter[constant[tty]]]
variable[epoll] assign[=] call[name[select].epoll, parameter[]]
call[name[epoll].register, parameter[call[name[monitor].fileno, parameter[]], name[select].POLLIN]]
while constant[True] begin[:]
<ast.Try object at 0x7da2047eb190>
for taget[tuple[[<ast.Name object at 0x7da2047e9300>, <ast.Name object at 0x7da2047e9c90>]]] in starred[name[events]] begin[:]
if compare[name[fileno] equal[==] call[name[monitor].fileno, parameter[]]] begin[:]
variable[usb_dev] assign[=] call[name[monitor].poll, parameter[]]
call[name[print], parameter[call[constant[autoconnect: {} action: {}].format, parameter[name[usb_dev].device_node, name[usb_dev].action]]]]
variable[dev] assign[=] call[name[find_serial_device_by_port], parameter[name[usb_dev].device_node]]
if compare[name[usb_dev].action equal[==] constant[add]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[constant[8]]]] begin[:]
if name[dev] begin[:]
variable[connected] assign[=] call[name[connect_serial], parameter[name[dev].port, name[dev].baud, name[dev].wait]]
if name[connected] begin[:]
break
call[name[time].sleep, parameter[constant[0.25]]] | keyword[def] identifier[autoconnect_thread] ( identifier[monitor] ):
literal[string]
identifier[monitor] . identifier[start] ()
identifier[monitor] . identifier[filter_by] ( literal[string] )
identifier[epoll] = identifier[select] . identifier[epoll] ()
identifier[epoll] . identifier[register] ( identifier[monitor] . identifier[fileno] (), identifier[select] . identifier[POLLIN] )
keyword[while] keyword[True] :
keyword[try] :
identifier[events] = identifier[epoll] . identifier[poll] ()
keyword[except] identifier[InterruptedError] :
keyword[continue]
keyword[for] identifier[fileno] , identifier[_] keyword[in] identifier[events] :
keyword[if] identifier[fileno] == identifier[monitor] . identifier[fileno] ():
identifier[usb_dev] = identifier[monitor] . identifier[poll] ()
identifier[print] ( literal[string] . identifier[format] ( identifier[usb_dev] . identifier[device_node] , identifier[usb_dev] . identifier[action] ))
identifier[dev] = identifier[find_serial_device_by_port] ( identifier[usb_dev] . identifier[device_node] )
keyword[if] identifier[usb_dev] . identifier[action] == literal[string] :
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ):
keyword[if] identifier[dev] :
identifier[connected] = identifier[connect_serial] ( identifier[dev] . identifier[port] , identifier[dev] . identifier[baud] , identifier[dev] . identifier[wait] )
keyword[elif] identifier[is_micropython_usb_device] ( identifier[usb_dev] ):
identifier[connected] = identifier[connect_serial] ( identifier[usb_dev] . identifier[device_node] )
keyword[else] :
identifier[connected] = keyword[False]
keyword[if] identifier[connected] :
keyword[break]
identifier[time] . identifier[sleep] ( literal[int] )
keyword[elif] identifier[usb_dev] . identifier[action] == literal[string] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[usb_dev] . identifier[device_node] )
keyword[if] identifier[dev] :
identifier[dev] . identifier[close] ()
keyword[break] | def autoconnect_thread(monitor):
"""Thread which detects USB Serial devices connecting and disconnecting."""
monitor.start()
monitor.filter_by('tty')
epoll = select.epoll()
epoll.register(monitor.fileno(), select.POLLIN)
while True:
try:
events = epoll.poll() # depends on [control=['try'], data=[]]
except InterruptedError:
continue # depends on [control=['except'], data=[]]
for (fileno, _) in events:
if fileno == monitor.fileno():
usb_dev = monitor.poll()
print('autoconnect: {} action: {}'.format(usb_dev.device_node, usb_dev.action))
dev = find_serial_device_by_port(usb_dev.device_node)
if usb_dev.action == 'add':
# Try connecting a few times. Sometimes the serial port
# reports itself as busy, which causes the connection to fail.
for i in range(8):
if dev:
connected = connect_serial(dev.port, dev.baud, dev.wait) # depends on [control=['if'], data=[]]
elif is_micropython_usb_device(usb_dev):
connected = connect_serial(usb_dev.device_node) # depends on [control=['if'], data=[]]
else:
connected = False
if connected:
break # depends on [control=['if'], data=[]]
time.sleep(0.25) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif usb_dev.action == 'remove':
print('')
print("USB Serial device '%s' disconnected" % usb_dev.device_node)
if dev:
dev.close()
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['while'], data=[]] |
def do_ls(self, params):
"""
\x1b[1mNAME\x1b[0m
ls - Lists the znodes for the given <path>
\x1b[1mSYNOPSIS\x1b[0m
ls <path> [watch] [sep]
\x1b[1mOPTIONS\x1b[0m
* watch: set a (child) watch on the path (default: false)
* sep: separator to be used (default: '\\n')
\x1b[1mEXAMPLES\x1b[0m
> ls /
configs
zookeeper
Setting a watch:
> ls / true
configs
zookeeper
> create /foo 'bar'
WatchedEvent(type='CHILD', state='CONNECTED', path=u'/')
> ls / false ,
configs,zookeeper
"""
watcher = lambda evt: self.show_output(str(evt))
kwargs = {"watch": watcher} if params.watch else {}
znodes = self._zk.get_children(params.path, **kwargs)
self.show_output(params.sep.join(sorted(znodes))) | def function[do_ls, parameter[self, params]]:
constant[
[1mNAME[0m
ls - Lists the znodes for the given <path>
[1mSYNOPSIS[0m
ls <path> [watch] [sep]
[1mOPTIONS[0m
* watch: set a (child) watch on the path (default: false)
* sep: separator to be used (default: '\n')
[1mEXAMPLES[0m
> ls /
configs
zookeeper
Setting a watch:
> ls / true
configs
zookeeper
> create /foo 'bar'
WatchedEvent(type='CHILD', state='CONNECTED', path=u'/')
> ls / false ,
configs,zookeeper
]
variable[watcher] assign[=] <ast.Lambda object at 0x7da18f00c940>
variable[kwargs] assign[=] <ast.IfExp object at 0x7da18f00cbe0>
variable[znodes] assign[=] call[name[self]._zk.get_children, parameter[name[params].path]]
call[name[self].show_output, parameter[call[name[params].sep.join, parameter[call[name[sorted], parameter[name[znodes]]]]]]] | keyword[def] identifier[do_ls] ( identifier[self] , identifier[params] ):
literal[string]
identifier[watcher] = keyword[lambda] identifier[evt] : identifier[self] . identifier[show_output] ( identifier[str] ( identifier[evt] ))
identifier[kwargs] ={ literal[string] : identifier[watcher] } keyword[if] identifier[params] . identifier[watch] keyword[else] {}
identifier[znodes] = identifier[self] . identifier[_zk] . identifier[get_children] ( identifier[params] . identifier[path] ,** identifier[kwargs] )
identifier[self] . identifier[show_output] ( identifier[params] . identifier[sep] . identifier[join] ( identifier[sorted] ( identifier[znodes] ))) | def do_ls(self, params):
"""
\x1b[1mNAME\x1b[0m
ls - Lists the znodes for the given <path>
\x1b[1mSYNOPSIS\x1b[0m
ls <path> [watch] [sep]
\x1b[1mOPTIONS\x1b[0m
* watch: set a (child) watch on the path (default: false)
* sep: separator to be used (default: '\\n')
\x1b[1mEXAMPLES\x1b[0m
> ls /
configs
zookeeper
Setting a watch:
> ls / true
configs
zookeeper
> create /foo 'bar'
WatchedEvent(type='CHILD', state='CONNECTED', path=u'/')
> ls / false ,
configs,zookeeper
"""
watcher = lambda evt: self.show_output(str(evt))
kwargs = {'watch': watcher} if params.watch else {}
znodes = self._zk.get_children(params.path, **kwargs)
self.show_output(params.sep.join(sorted(znodes))) |
def do_region(self, x, y, w, h):
"""Apply region selection."""
if (x is None):
self.logger.debug("region: full (nop)")
else:
self.logger.debug("region: (%d,%d,%d,%d)" % (x, y, w, h))
self.image = self.image.crop((x, y, x + w, y + h))
self.width = w
self.height = h | def function[do_region, parameter[self, x, y, w, h]]:
constant[Apply region selection.]
if compare[name[x] is constant[None]] begin[:]
call[name[self].logger.debug, parameter[constant[region: full (nop)]]] | keyword[def] identifier[do_region] ( identifier[self] , identifier[x] , identifier[y] , identifier[w] , identifier[h] ):
literal[string]
keyword[if] ( identifier[x] keyword[is] keyword[None] ):
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] %( identifier[x] , identifier[y] , identifier[w] , identifier[h] ))
identifier[self] . identifier[image] = identifier[self] . identifier[image] . identifier[crop] (( identifier[x] , identifier[y] , identifier[x] + identifier[w] , identifier[y] + identifier[h] ))
identifier[self] . identifier[width] = identifier[w]
identifier[self] . identifier[height] = identifier[h] | def do_region(self, x, y, w, h):
"""Apply region selection."""
if x is None:
self.logger.debug('region: full (nop)') # depends on [control=['if'], data=[]]
else:
self.logger.debug('region: (%d,%d,%d,%d)' % (x, y, w, h))
self.image = self.image.crop((x, y, x + w, y + h))
self.width = w
self.height = h |
def _determine_scaling_policies(scaling_policies, scaling_policies_from_pillar):
'''
helper method for present. ensure that scaling_policies are set
'''
pillar_scaling_policies = copy.deepcopy(
__salt__['config.option'](scaling_policies_from_pillar, {})
)
if not scaling_policies and pillar_scaling_policies:
scaling_policies = pillar_scaling_policies
return scaling_policies | def function[_determine_scaling_policies, parameter[scaling_policies, scaling_policies_from_pillar]]:
constant[
helper method for present. ensure that scaling_policies are set
]
variable[pillar_scaling_policies] assign[=] call[name[copy].deepcopy, parameter[call[call[name[__salt__]][constant[config.option]], parameter[name[scaling_policies_from_pillar], dictionary[[], []]]]]]
if <ast.BoolOp object at 0x7da204962470> begin[:]
variable[scaling_policies] assign[=] name[pillar_scaling_policies]
return[name[scaling_policies]] | keyword[def] identifier[_determine_scaling_policies] ( identifier[scaling_policies] , identifier[scaling_policies_from_pillar] ):
literal[string]
identifier[pillar_scaling_policies] = identifier[copy] . identifier[deepcopy] (
identifier[__salt__] [ literal[string] ]( identifier[scaling_policies_from_pillar] ,{})
)
keyword[if] keyword[not] identifier[scaling_policies] keyword[and] identifier[pillar_scaling_policies] :
identifier[scaling_policies] = identifier[pillar_scaling_policies]
keyword[return] identifier[scaling_policies] | def _determine_scaling_policies(scaling_policies, scaling_policies_from_pillar):
"""
helper method for present. ensure that scaling_policies are set
"""
pillar_scaling_policies = copy.deepcopy(__salt__['config.option'](scaling_policies_from_pillar, {}))
if not scaling_policies and pillar_scaling_policies:
scaling_policies = pillar_scaling_policies # depends on [control=['if'], data=[]]
return scaling_policies |
def delete_tag(self, project, repository, tag_name):
"""
Creates a tag using the information provided in the {@link RestCreateTagRequest request}
The authenticated user must have REPO_WRITE permission for the context repository to call this resource.
:param project:
:param repository:
:param tag_name:
:return:
"""
url = 'rest/git/1.0/projects/{project}/repos/{repository}/tags/{tag}'.format(project=project,
repository=repository,
tag=tag_name)
return self.delete(url) | def function[delete_tag, parameter[self, project, repository, tag_name]]:
constant[
Creates a tag using the information provided in the {@link RestCreateTagRequest request}
The authenticated user must have REPO_WRITE permission for the context repository to call this resource.
:param project:
:param repository:
:param tag_name:
:return:
]
variable[url] assign[=] call[constant[rest/git/1.0/projects/{project}/repos/{repository}/tags/{tag}].format, parameter[]]
return[call[name[self].delete, parameter[name[url]]]] | keyword[def] identifier[delete_tag] ( identifier[self] , identifier[project] , identifier[repository] , identifier[tag_name] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[project] = identifier[project] ,
identifier[repository] = identifier[repository] ,
identifier[tag] = identifier[tag_name] )
keyword[return] identifier[self] . identifier[delete] ( identifier[url] ) | def delete_tag(self, project, repository, tag_name):
"""
Creates a tag using the information provided in the {@link RestCreateTagRequest request}
The authenticated user must have REPO_WRITE permission for the context repository to call this resource.
:param project:
:param repository:
:param tag_name:
:return:
"""
url = 'rest/git/1.0/projects/{project}/repos/{repository}/tags/{tag}'.format(project=project, repository=repository, tag=tag_name)
return self.delete(url) |
def _tiff_from_exif_segment(cls, stream, offset, segment_length):
"""
Return a |Tiff| instance parsed from the Exif APP1 segment of
*segment_length* at *offset* in *stream*.
"""
# wrap full segment in its own stream and feed to Tiff()
stream.seek(offset+8)
segment_bytes = stream.read(segment_length-8)
substream = BytesIO(segment_bytes)
return Tiff.from_stream(substream) | def function[_tiff_from_exif_segment, parameter[cls, stream, offset, segment_length]]:
constant[
Return a |Tiff| instance parsed from the Exif APP1 segment of
*segment_length* at *offset* in *stream*.
]
call[name[stream].seek, parameter[binary_operation[name[offset] + constant[8]]]]
variable[segment_bytes] assign[=] call[name[stream].read, parameter[binary_operation[name[segment_length] - constant[8]]]]
variable[substream] assign[=] call[name[BytesIO], parameter[name[segment_bytes]]]
return[call[name[Tiff].from_stream, parameter[name[substream]]]] | keyword[def] identifier[_tiff_from_exif_segment] ( identifier[cls] , identifier[stream] , identifier[offset] , identifier[segment_length] ):
literal[string]
identifier[stream] . identifier[seek] ( identifier[offset] + literal[int] )
identifier[segment_bytes] = identifier[stream] . identifier[read] ( identifier[segment_length] - literal[int] )
identifier[substream] = identifier[BytesIO] ( identifier[segment_bytes] )
keyword[return] identifier[Tiff] . identifier[from_stream] ( identifier[substream] ) | def _tiff_from_exif_segment(cls, stream, offset, segment_length):
"""
Return a |Tiff| instance parsed from the Exif APP1 segment of
*segment_length* at *offset* in *stream*.
"""
# wrap full segment in its own stream and feed to Tiff()
stream.seek(offset + 8)
segment_bytes = stream.read(segment_length - 8)
substream = BytesIO(segment_bytes)
return Tiff.from_stream(substream) |
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(NetstatCollector, self).get_default_config()
config.update({
'path': 'netstat',
})
return config | def function[get_default_config, parameter[self]]:
constant[
Returns the default collector settings
]
variable[config] assign[=] call[call[name[super], parameter[name[NetstatCollector], name[self]]].get_default_config, parameter[]]
call[name[config].update, parameter[dictionary[[<ast.Constant object at 0x7da18dc05de0>], [<ast.Constant object at 0x7da18dc04b50>]]]]
return[name[config]] | keyword[def] identifier[get_default_config] ( identifier[self] ):
literal[string]
identifier[config] = identifier[super] ( identifier[NetstatCollector] , identifier[self] ). identifier[get_default_config] ()
identifier[config] . identifier[update] ({
literal[string] : literal[string] ,
})
keyword[return] identifier[config] | def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(NetstatCollector, self).get_default_config()
config.update({'path': 'netstat'})
return config |
def sched(self):
""" Yield CPU.
This will choose another process from the RUNNNIG list and change
current running process. May give the same cpu if only one running
process.
"""
if len(self.procs) > 1:
logger.info("SCHED:")
logger.info("\tProcess: %r", self.procs)
logger.info("\tRunning: %r", self.running)
logger.info("\tRWait: %r", self.rwait)
logger.info("\tTWait: %r", self.twait)
logger.info("\tTimers: %r", self.timers)
logger.info("\tCurrent clock: %d", self.clocks)
logger.info("\tCurrent cpu: %d", self._current)
if len(self.running) == 0:
logger.info("None running checking if there is some process waiting for a timeout")
if all([x is None for x in self.timers]):
raise Deadlock()
self.clocks = min([x for x in self.timers if x is not None]) + 1
self.check_timers()
assert len(self.running) != 0, "DEADLOCK!"
self._current = self.running[0]
return
next_index = (self.running.index(self._current) + 1) % len(self.running)
next = self.running[next_index]
if len(self.procs) > 1:
logger.info("\tTransfer control from process %d to %d", self._current, next)
self._current = next | def function[sched, parameter[self]]:
constant[ Yield CPU.
This will choose another process from the RUNNNIG list and change
current running process. May give the same cpu if only one running
process.
]
if compare[call[name[len], parameter[name[self].procs]] greater[>] constant[1]] begin[:]
call[name[logger].info, parameter[constant[SCHED:]]]
call[name[logger].info, parameter[constant[ Process: %r], name[self].procs]]
call[name[logger].info, parameter[constant[ Running: %r], name[self].running]]
call[name[logger].info, parameter[constant[ RWait: %r], name[self].rwait]]
call[name[logger].info, parameter[constant[ TWait: %r], name[self].twait]]
call[name[logger].info, parameter[constant[ Timers: %r], name[self].timers]]
call[name[logger].info, parameter[constant[ Current clock: %d], name[self].clocks]]
call[name[logger].info, parameter[constant[ Current cpu: %d], name[self]._current]]
if compare[call[name[len], parameter[name[self].running]] equal[==] constant[0]] begin[:]
call[name[logger].info, parameter[constant[None running checking if there is some process waiting for a timeout]]]
if call[name[all], parameter[<ast.ListComp object at 0x7da20e9553f0>]] begin[:]
<ast.Raise object at 0x7da1b0087250>
name[self].clocks assign[=] binary_operation[call[name[min], parameter[<ast.ListComp object at 0x7da1b0084c70>]] + constant[1]]
call[name[self].check_timers, parameter[]]
assert[compare[call[name[len], parameter[name[self].running]] not_equal[!=] constant[0]]]
name[self]._current assign[=] call[name[self].running][constant[0]]
return[None]
variable[next_index] assign[=] binary_operation[binary_operation[call[name[self].running.index, parameter[name[self]._current]] + constant[1]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self].running]]]
variable[next] assign[=] call[name[self].running][name[next_index]]
if compare[call[name[len], parameter[name[self].procs]] greater[>] constant[1]] begin[:]
call[name[logger].info, parameter[constant[ Transfer control from process %d to %d], name[self]._current, name[next]]]
name[self]._current assign[=] name[next] | keyword[def] identifier[sched] ( identifier[self] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[procs] )> literal[int] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[procs] )
identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[running] )
identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[rwait] )
identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[twait] )
identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[timers] )
identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[clocks] )
identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[_current] )
keyword[if] identifier[len] ( identifier[self] . identifier[running] )== literal[int] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] identifier[all] ([ identifier[x] keyword[is] keyword[None] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[timers] ]):
keyword[raise] identifier[Deadlock] ()
identifier[self] . identifier[clocks] = identifier[min] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[timers] keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] ])+ literal[int]
identifier[self] . identifier[check_timers] ()
keyword[assert] identifier[len] ( identifier[self] . identifier[running] )!= literal[int] , literal[string]
identifier[self] . identifier[_current] = identifier[self] . identifier[running] [ literal[int] ]
keyword[return]
identifier[next_index] =( identifier[self] . identifier[running] . identifier[index] ( identifier[self] . identifier[_current] )+ literal[int] )% identifier[len] ( identifier[self] . identifier[running] )
identifier[next] = identifier[self] . identifier[running] [ identifier[next_index] ]
keyword[if] identifier[len] ( identifier[self] . identifier[procs] )> literal[int] :
identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[_current] , identifier[next] )
identifier[self] . identifier[_current] = identifier[next] | def sched(self):
""" Yield CPU.
This will choose another process from the RUNNNIG list and change
current running process. May give the same cpu if only one running
process.
"""
if len(self.procs) > 1:
logger.info('SCHED:')
logger.info('\tProcess: %r', self.procs)
logger.info('\tRunning: %r', self.running)
logger.info('\tRWait: %r', self.rwait)
logger.info('\tTWait: %r', self.twait)
logger.info('\tTimers: %r', self.timers)
logger.info('\tCurrent clock: %d', self.clocks)
logger.info('\tCurrent cpu: %d', self._current) # depends on [control=['if'], data=[]]
if len(self.running) == 0:
logger.info('None running checking if there is some process waiting for a timeout')
if all([x is None for x in self.timers]):
raise Deadlock() # depends on [control=['if'], data=[]]
self.clocks = min([x for x in self.timers if x is not None]) + 1
self.check_timers()
assert len(self.running) != 0, 'DEADLOCK!'
self._current = self.running[0]
return # depends on [control=['if'], data=[]]
next_index = (self.running.index(self._current) + 1) % len(self.running)
next = self.running[next_index]
if len(self.procs) > 1:
logger.info('\tTransfer control from process %d to %d', self._current, next) # depends on [control=['if'], data=[]]
self._current = next |
def to_ogc_wkt(self):
"""
Returns the CS as a OGC WKT formatted string.
"""
string = 'PROJCS["%s", %s, %s, ' % (self.name, self.geogcs.to_ogc_wkt(), self.proj.to_ogc_wkt() )
string += ", ".join(param.to_ogc_wkt() for param in self.params)
string += ', %s' % self.unit.to_ogc_wkt()
string += ', AXIS["X", %s], AXIS["Y", %s]]' % (self.twin_ax[0].ogc_wkt, self.twin_ax[1].ogc_wkt )
return string | def function[to_ogc_wkt, parameter[self]]:
constant[
Returns the CS as a OGC WKT formatted string.
]
variable[string] assign[=] binary_operation[constant[PROJCS["%s", %s, %s, ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18c4cf280>, <ast.Call object at 0x7da18c4cfdc0>, <ast.Call object at 0x7da18c4cf7c0>]]]
<ast.AugAssign object at 0x7da18c4ccfa0>
<ast.AugAssign object at 0x7da1b025d660>
<ast.AugAssign object at 0x7da1b025c760>
return[name[string]] | keyword[def] identifier[to_ogc_wkt] ( identifier[self] ):
literal[string]
identifier[string] = literal[string] %( identifier[self] . identifier[name] , identifier[self] . identifier[geogcs] . identifier[to_ogc_wkt] (), identifier[self] . identifier[proj] . identifier[to_ogc_wkt] ())
identifier[string] += literal[string] . identifier[join] ( identifier[param] . identifier[to_ogc_wkt] () keyword[for] identifier[param] keyword[in] identifier[self] . identifier[params] )
identifier[string] += literal[string] % identifier[self] . identifier[unit] . identifier[to_ogc_wkt] ()
identifier[string] += literal[string] %( identifier[self] . identifier[twin_ax] [ literal[int] ]. identifier[ogc_wkt] , identifier[self] . identifier[twin_ax] [ literal[int] ]. identifier[ogc_wkt] )
keyword[return] identifier[string] | def to_ogc_wkt(self):
"""
Returns the CS as a OGC WKT formatted string.
"""
string = 'PROJCS["%s", %s, %s, ' % (self.name, self.geogcs.to_ogc_wkt(), self.proj.to_ogc_wkt())
string += ', '.join((param.to_ogc_wkt() for param in self.params))
string += ', %s' % self.unit.to_ogc_wkt()
string += ', AXIS["X", %s], AXIS["Y", %s]]' % (self.twin_ax[0].ogc_wkt, self.twin_ax[1].ogc_wkt)
return string |
def get_D_square_min(atoms_now, atoms_old, i_now, j_now, delta_plus_epsilon=None):
"""
Calculate the D^2_min norm of Falk and Langer
"""
nat = len(atoms_now)
assert len(atoms_now) == len(atoms_old)
pos_now = atoms_now.positions
pos_old = atoms_old.positions
# Compute current and old distance vectors. Note that current distance
# vectors cannot be taken from the neighbor calculation, because neighbors
# are calculated from the sheared cell while these distance need to come
# from the unsheared cell. Taking the distance from the unsheared cell
# make periodic boundary conditions (and flipping of cell) a lot easier.
dr_now = mic(pos_now[i_now] - pos_now[j_now], atoms_now.cell)
dr_old = mic(pos_old[i_now] - pos_old[j_now], atoms_old.cell)
# Sanity check: Shape needs to be identical!
assert dr_now.shape == dr_old.shape
if delta_plus_epsilon is None:
# Get minimum strain tensor
delta_plus_epsilon = get_delta_plus_epsilon(nat, i_now, dr_now, dr_old)
# Spread epsilon out for each neighbor index
delta_plus_epsilon_n = delta_plus_epsilon[i_now]
# Compute D^2_min
d_sq_n = np.sum(
(
dr_now-
np.sum(delta_plus_epsilon_n.reshape(-1,3,3)*dr_old.reshape(-1,1,3),
axis=2)
)**2,
axis=1)
# For each atom, sum over all neighbors
d_sq = np.bincount(i_now, weights=d_sq_n)
return delta_plus_epsilon, d_sq | def function[get_D_square_min, parameter[atoms_now, atoms_old, i_now, j_now, delta_plus_epsilon]]:
constant[
Calculate the D^2_min norm of Falk and Langer
]
variable[nat] assign[=] call[name[len], parameter[name[atoms_now]]]
assert[compare[call[name[len], parameter[name[atoms_now]]] equal[==] call[name[len], parameter[name[atoms_old]]]]]
variable[pos_now] assign[=] name[atoms_now].positions
variable[pos_old] assign[=] name[atoms_old].positions
variable[dr_now] assign[=] call[name[mic], parameter[binary_operation[call[name[pos_now]][name[i_now]] - call[name[pos_now]][name[j_now]]], name[atoms_now].cell]]
variable[dr_old] assign[=] call[name[mic], parameter[binary_operation[call[name[pos_old]][name[i_now]] - call[name[pos_old]][name[j_now]]], name[atoms_old].cell]]
assert[compare[name[dr_now].shape equal[==] name[dr_old].shape]]
if compare[name[delta_plus_epsilon] is constant[None]] begin[:]
variable[delta_plus_epsilon] assign[=] call[name[get_delta_plus_epsilon], parameter[name[nat], name[i_now], name[dr_now], name[dr_old]]]
variable[delta_plus_epsilon_n] assign[=] call[name[delta_plus_epsilon]][name[i_now]]
variable[d_sq_n] assign[=] call[name[np].sum, parameter[binary_operation[binary_operation[name[dr_now] - call[name[np].sum, parameter[binary_operation[call[name[delta_plus_epsilon_n].reshape, parameter[<ast.UnaryOp object at 0x7da1b1236ce0>, constant[3], constant[3]]] * call[name[dr_old].reshape, parameter[<ast.UnaryOp object at 0x7da1b1236b30>, constant[1], constant[3]]]]]]] ** constant[2]]]]
variable[d_sq] assign[=] call[name[np].bincount, parameter[name[i_now]]]
return[tuple[[<ast.Name object at 0x7da1b12377f0>, <ast.Name object at 0x7da1b12340a0>]]] | keyword[def] identifier[get_D_square_min] ( identifier[atoms_now] , identifier[atoms_old] , identifier[i_now] , identifier[j_now] , identifier[delta_plus_epsilon] = keyword[None] ):
literal[string]
identifier[nat] = identifier[len] ( identifier[atoms_now] )
keyword[assert] identifier[len] ( identifier[atoms_now] )== identifier[len] ( identifier[atoms_old] )
identifier[pos_now] = identifier[atoms_now] . identifier[positions]
identifier[pos_old] = identifier[atoms_old] . identifier[positions]
identifier[dr_now] = identifier[mic] ( identifier[pos_now] [ identifier[i_now] ]- identifier[pos_now] [ identifier[j_now] ], identifier[atoms_now] . identifier[cell] )
identifier[dr_old] = identifier[mic] ( identifier[pos_old] [ identifier[i_now] ]- identifier[pos_old] [ identifier[j_now] ], identifier[atoms_old] . identifier[cell] )
keyword[assert] identifier[dr_now] . identifier[shape] == identifier[dr_old] . identifier[shape]
keyword[if] identifier[delta_plus_epsilon] keyword[is] keyword[None] :
identifier[delta_plus_epsilon] = identifier[get_delta_plus_epsilon] ( identifier[nat] , identifier[i_now] , identifier[dr_now] , identifier[dr_old] )
identifier[delta_plus_epsilon_n] = identifier[delta_plus_epsilon] [ identifier[i_now] ]
identifier[d_sq_n] = identifier[np] . identifier[sum] (
(
identifier[dr_now] -
identifier[np] . identifier[sum] ( identifier[delta_plus_epsilon_n] . identifier[reshape] (- literal[int] , literal[int] , literal[int] )* identifier[dr_old] . identifier[reshape] (- literal[int] , literal[int] , literal[int] ),
identifier[axis] = literal[int] )
)** literal[int] ,
identifier[axis] = literal[int] )
identifier[d_sq] = identifier[np] . identifier[bincount] ( identifier[i_now] , identifier[weights] = identifier[d_sq_n] )
keyword[return] identifier[delta_plus_epsilon] , identifier[d_sq] | def get_D_square_min(atoms_now, atoms_old, i_now, j_now, delta_plus_epsilon=None):
"""
Calculate the D^2_min norm of Falk and Langer
"""
nat = len(atoms_now)
assert len(atoms_now) == len(atoms_old)
pos_now = atoms_now.positions
pos_old = atoms_old.positions
# Compute current and old distance vectors. Note that current distance
# vectors cannot be taken from the neighbor calculation, because neighbors
# are calculated from the sheared cell while these distance need to come
# from the unsheared cell. Taking the distance from the unsheared cell
# make periodic boundary conditions (and flipping of cell) a lot easier.
dr_now = mic(pos_now[i_now] - pos_now[j_now], atoms_now.cell)
dr_old = mic(pos_old[i_now] - pos_old[j_now], atoms_old.cell)
# Sanity check: Shape needs to be identical!
assert dr_now.shape == dr_old.shape
if delta_plus_epsilon is None:
# Get minimum strain tensor
delta_plus_epsilon = get_delta_plus_epsilon(nat, i_now, dr_now, dr_old) # depends on [control=['if'], data=['delta_plus_epsilon']]
# Spread epsilon out for each neighbor index
delta_plus_epsilon_n = delta_plus_epsilon[i_now]
# Compute D^2_min
d_sq_n = np.sum((dr_now - np.sum(delta_plus_epsilon_n.reshape(-1, 3, 3) * dr_old.reshape(-1, 1, 3), axis=2)) ** 2, axis=1)
# For each atom, sum over all neighbors
d_sq = np.bincount(i_now, weights=d_sq_n)
return (delta_plus_epsilon, d_sq) |
def dafrda(handle, begin, end):
"""
Read the double precision data bounded by two addresses within a DAF.
Deprecated: This routine has been superseded by :func:`dafgda` and
:func:`dafgsr`. This routine is supported for purposes of backward
compatibility only.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafrda_c.html
:param handle: Handle of a DAF.
:type handle: int
:param begin: Initial address within file.
:type begin: int
:param end: Final address within file.
:type end: int
:return: Data contained between begin and end.
:rtype: Array of floats
"""
handle = ctypes.c_int(handle)
begin = ctypes.c_int(begin)
end = ctypes.c_int(end)
data = stypes.emptyDoubleVector(1 + end.value - begin.value)
libspice.dafrda_c(handle, begin, end, data)
return stypes.cVectorToPython(data) | def function[dafrda, parameter[handle, begin, end]]:
constant[
Read the double precision data bounded by two addresses within a DAF.
Deprecated: This routine has been superseded by :func:`dafgda` and
:func:`dafgsr`. This routine is supported for purposes of backward
compatibility only.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafrda_c.html
:param handle: Handle of a DAF.
:type handle: int
:param begin: Initial address within file.
:type begin: int
:param end: Final address within file.
:type end: int
:return: Data contained between begin and end.
:rtype: Array of floats
]
variable[handle] assign[=] call[name[ctypes].c_int, parameter[name[handle]]]
variable[begin] assign[=] call[name[ctypes].c_int, parameter[name[begin]]]
variable[end] assign[=] call[name[ctypes].c_int, parameter[name[end]]]
variable[data] assign[=] call[name[stypes].emptyDoubleVector, parameter[binary_operation[binary_operation[constant[1] + name[end].value] - name[begin].value]]]
call[name[libspice].dafrda_c, parameter[name[handle], name[begin], name[end], name[data]]]
return[call[name[stypes].cVectorToPython, parameter[name[data]]]] | keyword[def] identifier[dafrda] ( identifier[handle] , identifier[begin] , identifier[end] ):
literal[string]
identifier[handle] = identifier[ctypes] . identifier[c_int] ( identifier[handle] )
identifier[begin] = identifier[ctypes] . identifier[c_int] ( identifier[begin] )
identifier[end] = identifier[ctypes] . identifier[c_int] ( identifier[end] )
identifier[data] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] + identifier[end] . identifier[value] - identifier[begin] . identifier[value] )
identifier[libspice] . identifier[dafrda_c] ( identifier[handle] , identifier[begin] , identifier[end] , identifier[data] )
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[data] ) | def dafrda(handle, begin, end):
"""
Read the double precision data bounded by two addresses within a DAF.
Deprecated: This routine has been superseded by :func:`dafgda` and
:func:`dafgsr`. This routine is supported for purposes of backward
compatibility only.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafrda_c.html
:param handle: Handle of a DAF.
:type handle: int
:param begin: Initial address within file.
:type begin: int
:param end: Final address within file.
:type end: int
:return: Data contained between begin and end.
:rtype: Array of floats
"""
handle = ctypes.c_int(handle)
begin = ctypes.c_int(begin)
end = ctypes.c_int(end)
data = stypes.emptyDoubleVector(1 + end.value - begin.value)
libspice.dafrda_c(handle, begin, end, data)
return stypes.cVectorToPython(data) |
def tob32(val):
"""Return provided 32 bit value as a string of four bytes."""
ret = bytearray(4)
ret[0] = (val>>24)&M8
ret[1] = (val>>16)&M8
ret[2] = (val>>8)&M8
ret[3] = val&M8
return ret | def function[tob32, parameter[val]]:
constant[Return provided 32 bit value as a string of four bytes.]
variable[ret] assign[=] call[name[bytearray], parameter[constant[4]]]
call[name[ret]][constant[0]] assign[=] binary_operation[binary_operation[name[val] <ast.RShift object at 0x7da2590d6a40> constant[24]] <ast.BitAnd object at 0x7da2590d6b60> name[M8]]
call[name[ret]][constant[1]] assign[=] binary_operation[binary_operation[name[val] <ast.RShift object at 0x7da2590d6a40> constant[16]] <ast.BitAnd object at 0x7da2590d6b60> name[M8]]
call[name[ret]][constant[2]] assign[=] binary_operation[binary_operation[name[val] <ast.RShift object at 0x7da2590d6a40> constant[8]] <ast.BitAnd object at 0x7da2590d6b60> name[M8]]
call[name[ret]][constant[3]] assign[=] binary_operation[name[val] <ast.BitAnd object at 0x7da2590d6b60> name[M8]]
return[name[ret]] | keyword[def] identifier[tob32] ( identifier[val] ):
literal[string]
identifier[ret] = identifier[bytearray] ( literal[int] )
identifier[ret] [ literal[int] ]=( identifier[val] >> literal[int] )& identifier[M8]
identifier[ret] [ literal[int] ]=( identifier[val] >> literal[int] )& identifier[M8]
identifier[ret] [ literal[int] ]=( identifier[val] >> literal[int] )& identifier[M8]
identifier[ret] [ literal[int] ]= identifier[val] & identifier[M8]
keyword[return] identifier[ret] | def tob32(val):
"""Return provided 32 bit value as a string of four bytes."""
ret = bytearray(4)
ret[0] = val >> 24 & M8
ret[1] = val >> 16 & M8
ret[2] = val >> 8 & M8
ret[3] = val & M8
return ret |
def setup_address(self, name, address=default, transact={}):
"""
Set up the name to point to the supplied address.
The sender of the transaction must own the name, or
its parent name.
Example: If the caller owns ``parentname.eth`` with no subdomains
and calls this method with ``sub.parentname.eth``,
then ``sub`` will be created as part of this call.
:param str name: ENS name to set up
:param str address: name will point to this address, in checksum format. If ``None``,
erase the record. If not specified, name will point to the owner's address.
:param dict transact: the transaction configuration, like in
:meth:`~web3.eth.Eth.sendTransaction`
:raises InvalidName: if ``name`` has invalid syntax
:raises UnauthorizedError: if ``'from'`` in `transact` does not own `name`
"""
owner = self.setup_owner(name, transact=transact)
self._assert_control(owner, name)
if is_none_or_zero_address(address):
address = None
elif address is default:
address = owner
elif is_binary_address(address):
address = to_checksum_address(address)
elif not is_checksum_address(address):
raise ValueError("You must supply the address in checksum format")
if self.address(name) == address:
return None
if address is None:
address = EMPTY_ADDR_HEX
transact['from'] = owner
resolver = self._set_resolver(name, transact=transact)
return resolver.functions.setAddr(raw_name_to_hash(name), address).transact(transact) | def function[setup_address, parameter[self, name, address, transact]]:
constant[
Set up the name to point to the supplied address.
The sender of the transaction must own the name, or
its parent name.
Example: If the caller owns ``parentname.eth`` with no subdomains
and calls this method with ``sub.parentname.eth``,
then ``sub`` will be created as part of this call.
:param str name: ENS name to set up
:param str address: name will point to this address, in checksum format. If ``None``,
erase the record. If not specified, name will point to the owner's address.
:param dict transact: the transaction configuration, like in
:meth:`~web3.eth.Eth.sendTransaction`
:raises InvalidName: if ``name`` has invalid syntax
:raises UnauthorizedError: if ``'from'`` in `transact` does not own `name`
]
variable[owner] assign[=] call[name[self].setup_owner, parameter[name[name]]]
call[name[self]._assert_control, parameter[name[owner], name[name]]]
if call[name[is_none_or_zero_address], parameter[name[address]]] begin[:]
variable[address] assign[=] constant[None]
if compare[call[name[self].address, parameter[name[name]]] equal[==] name[address]] begin[:]
return[constant[None]]
if compare[name[address] is constant[None]] begin[:]
variable[address] assign[=] name[EMPTY_ADDR_HEX]
call[name[transact]][constant[from]] assign[=] name[owner]
variable[resolver] assign[=] call[name[self]._set_resolver, parameter[name[name]]]
return[call[call[name[resolver].functions.setAddr, parameter[call[name[raw_name_to_hash], parameter[name[name]]], name[address]]].transact, parameter[name[transact]]]] | keyword[def] identifier[setup_address] ( identifier[self] , identifier[name] , identifier[address] = identifier[default] , identifier[transact] ={}):
literal[string]
identifier[owner] = identifier[self] . identifier[setup_owner] ( identifier[name] , identifier[transact] = identifier[transact] )
identifier[self] . identifier[_assert_control] ( identifier[owner] , identifier[name] )
keyword[if] identifier[is_none_or_zero_address] ( identifier[address] ):
identifier[address] = keyword[None]
keyword[elif] identifier[address] keyword[is] identifier[default] :
identifier[address] = identifier[owner]
keyword[elif] identifier[is_binary_address] ( identifier[address] ):
identifier[address] = identifier[to_checksum_address] ( identifier[address] )
keyword[elif] keyword[not] identifier[is_checksum_address] ( identifier[address] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[self] . identifier[address] ( identifier[name] )== identifier[address] :
keyword[return] keyword[None]
keyword[if] identifier[address] keyword[is] keyword[None] :
identifier[address] = identifier[EMPTY_ADDR_HEX]
identifier[transact] [ literal[string] ]= identifier[owner]
identifier[resolver] = identifier[self] . identifier[_set_resolver] ( identifier[name] , identifier[transact] = identifier[transact] )
keyword[return] identifier[resolver] . identifier[functions] . identifier[setAddr] ( identifier[raw_name_to_hash] ( identifier[name] ), identifier[address] ). identifier[transact] ( identifier[transact] ) | def setup_address(self, name, address=default, transact={}):
"""
Set up the name to point to the supplied address.
The sender of the transaction must own the name, or
its parent name.
Example: If the caller owns ``parentname.eth`` with no subdomains
and calls this method with ``sub.parentname.eth``,
then ``sub`` will be created as part of this call.
:param str name: ENS name to set up
:param str address: name will point to this address, in checksum format. If ``None``,
erase the record. If not specified, name will point to the owner's address.
:param dict transact: the transaction configuration, like in
:meth:`~web3.eth.Eth.sendTransaction`
:raises InvalidName: if ``name`` has invalid syntax
:raises UnauthorizedError: if ``'from'`` in `transact` does not own `name`
"""
owner = self.setup_owner(name, transact=transact)
self._assert_control(owner, name)
if is_none_or_zero_address(address):
address = None # depends on [control=['if'], data=[]]
elif address is default:
address = owner # depends on [control=['if'], data=['address']]
elif is_binary_address(address):
address = to_checksum_address(address) # depends on [control=['if'], data=[]]
elif not is_checksum_address(address):
raise ValueError('You must supply the address in checksum format') # depends on [control=['if'], data=[]]
if self.address(name) == address:
return None # depends on [control=['if'], data=[]]
if address is None:
address = EMPTY_ADDR_HEX # depends on [control=['if'], data=['address']]
transact['from'] = owner
resolver = self._set_resolver(name, transact=transact)
return resolver.functions.setAddr(raw_name_to_hash(name), address).transact(transact) |
def smear(self, sigma):
"""
Apply Gaussian smearing to spectrum y value.
Args:
sigma: Std dev for Gaussian smear function
"""
diff = [self.x[i + 1] - self.x[i] for i in range(len(self.x) - 1)]
avg_x_per_step = np.sum(diff) / len(diff)
if len(self.ydim) == 1:
self.y = gaussian_filter1d(self.y, sigma / avg_x_per_step)
else:
self.y = np.array([
gaussian_filter1d(self.y[:, k], sigma / avg_x_per_step)
for k in range(self.ydim[1])]).T | def function[smear, parameter[self, sigma]]:
constant[
Apply Gaussian smearing to spectrum y value.
Args:
sigma: Std dev for Gaussian smear function
]
variable[diff] assign[=] <ast.ListComp object at 0x7da18f00de70>
variable[avg_x_per_step] assign[=] binary_operation[call[name[np].sum, parameter[name[diff]]] / call[name[len], parameter[name[diff]]]]
if compare[call[name[len], parameter[name[self].ydim]] equal[==] constant[1]] begin[:]
name[self].y assign[=] call[name[gaussian_filter1d], parameter[name[self].y, binary_operation[name[sigma] / name[avg_x_per_step]]]] | keyword[def] identifier[smear] ( identifier[self] , identifier[sigma] ):
literal[string]
identifier[diff] =[ identifier[self] . identifier[x] [ identifier[i] + literal[int] ]- identifier[self] . identifier[x] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[x] )- literal[int] )]
identifier[avg_x_per_step] = identifier[np] . identifier[sum] ( identifier[diff] )/ identifier[len] ( identifier[diff] )
keyword[if] identifier[len] ( identifier[self] . identifier[ydim] )== literal[int] :
identifier[self] . identifier[y] = identifier[gaussian_filter1d] ( identifier[self] . identifier[y] , identifier[sigma] / identifier[avg_x_per_step] )
keyword[else] :
identifier[self] . identifier[y] = identifier[np] . identifier[array] ([
identifier[gaussian_filter1d] ( identifier[self] . identifier[y] [:, identifier[k] ], identifier[sigma] / identifier[avg_x_per_step] )
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[self] . identifier[ydim] [ literal[int] ])]). identifier[T] | def smear(self, sigma):
"""
Apply Gaussian smearing to spectrum y value.
Args:
sigma: Std dev for Gaussian smear function
"""
diff = [self.x[i + 1] - self.x[i] for i in range(len(self.x) - 1)]
avg_x_per_step = np.sum(diff) / len(diff)
if len(self.ydim) == 1:
self.y = gaussian_filter1d(self.y, sigma / avg_x_per_step) # depends on [control=['if'], data=[]]
else:
self.y = np.array([gaussian_filter1d(self.y[:, k], sigma / avg_x_per_step) for k in range(self.ydim[1])]).T |
def submit_link(self, title, url):
"""Submit link to this subreddit (POST). Calls :meth:`narwal.Reddit.submit_link`.
:param title: title of submission
:param url: url submission links to
"""
return self._reddit.submit_link(self.display_name, title, url) | def function[submit_link, parameter[self, title, url]]:
constant[Submit link to this subreddit (POST). Calls :meth:`narwal.Reddit.submit_link`.
:param title: title of submission
:param url: url submission links to
]
return[call[name[self]._reddit.submit_link, parameter[name[self].display_name, name[title], name[url]]]] | keyword[def] identifier[submit_link] ( identifier[self] , identifier[title] , identifier[url] ):
literal[string]
keyword[return] identifier[self] . identifier[_reddit] . identifier[submit_link] ( identifier[self] . identifier[display_name] , identifier[title] , identifier[url] ) | def submit_link(self, title, url):
"""Submit link to this subreddit (POST). Calls :meth:`narwal.Reddit.submit_link`.
:param title: title of submission
:param url: url submission links to
"""
return self._reddit.submit_link(self.display_name, title, url) |
def cleanup_service(self, factory, svc_registration):
# type: (Any, ServiceRegistration) -> bool
"""
If this bundle used that factory, releases the reference; else does
nothing
:param factory: The service factory
:param svc_registration: The ServiceRegistration object
:return: True if the bundle was using the factory, else False
"""
svc_ref = svc_registration.get_reference()
try:
# "service" for factories, "services" for prototypes
services, _ = self.__factored.pop(svc_ref)
except KeyError:
return False
else:
if svc_ref.is_prototype() and services:
for service in services:
try:
factory.unget_service_instance(
self.__bundle, svc_registration, service
)
except Exception:
# Ignore instance-level exceptions, potential errors
# will reappear in unget_service()
pass
# Call the factory
factory.unget_service(self.__bundle, svc_registration)
# No more association
svc_ref.unused_by(self.__bundle)
return True | def function[cleanup_service, parameter[self, factory, svc_registration]]:
constant[
If this bundle used that factory, releases the reference; else does
nothing
:param factory: The service factory
:param svc_registration: The ServiceRegistration object
:return: True if the bundle was using the factory, else False
]
variable[svc_ref] assign[=] call[name[svc_registration].get_reference, parameter[]]
<ast.Try object at 0x7da1b03ae020> | keyword[def] identifier[cleanup_service] ( identifier[self] , identifier[factory] , identifier[svc_registration] ):
literal[string]
identifier[svc_ref] = identifier[svc_registration] . identifier[get_reference] ()
keyword[try] :
identifier[services] , identifier[_] = identifier[self] . identifier[__factored] . identifier[pop] ( identifier[svc_ref] )
keyword[except] identifier[KeyError] :
keyword[return] keyword[False]
keyword[else] :
keyword[if] identifier[svc_ref] . identifier[is_prototype] () keyword[and] identifier[services] :
keyword[for] identifier[service] keyword[in] identifier[services] :
keyword[try] :
identifier[factory] . identifier[unget_service_instance] (
identifier[self] . identifier[__bundle] , identifier[svc_registration] , identifier[service]
)
keyword[except] identifier[Exception] :
keyword[pass]
identifier[factory] . identifier[unget_service] ( identifier[self] . identifier[__bundle] , identifier[svc_registration] )
identifier[svc_ref] . identifier[unused_by] ( identifier[self] . identifier[__bundle] )
keyword[return] keyword[True] | def cleanup_service(self, factory, svc_registration):
# type: (Any, ServiceRegistration) -> bool
'\n If this bundle used that factory, releases the reference; else does\n nothing\n\n :param factory: The service factory\n :param svc_registration: The ServiceRegistration object\n :return: True if the bundle was using the factory, else False\n '
svc_ref = svc_registration.get_reference()
try:
# "service" for factories, "services" for prototypes
(services, _) = self.__factored.pop(svc_ref) # depends on [control=['try'], data=[]]
except KeyError:
return False # depends on [control=['except'], data=[]]
else:
if svc_ref.is_prototype() and services:
for service in services:
try:
factory.unget_service_instance(self.__bundle, svc_registration, service) # depends on [control=['try'], data=[]]
except Exception:
# Ignore instance-level exceptions, potential errors
# will reappear in unget_service()
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['service']] # depends on [control=['if'], data=[]]
# Call the factory
factory.unget_service(self.__bundle, svc_registration)
# No more association
svc_ref.unused_by(self.__bundle)
return True |
def del_register_user(self, register_user):
"""
Deletes registration object from database
:param register_user: RegisterUser object to delete
"""
try:
self.get_session.delete(register_user)
self.get_session.commit()
return True
except Exception as e:
log.error(c.LOGMSG_ERR_SEC_DEL_REGISTER_USER.format(str(e)))
self.get_session.rollback()
return False | def function[del_register_user, parameter[self, register_user]]:
constant[
Deletes registration object from database
:param register_user: RegisterUser object to delete
]
<ast.Try object at 0x7da20c7c9030> | keyword[def] identifier[del_register_user] ( identifier[self] , identifier[register_user] ):
literal[string]
keyword[try] :
identifier[self] . identifier[get_session] . identifier[delete] ( identifier[register_user] )
identifier[self] . identifier[get_session] . identifier[commit] ()
keyword[return] keyword[True]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[log] . identifier[error] ( identifier[c] . identifier[LOGMSG_ERR_SEC_DEL_REGISTER_USER] . identifier[format] ( identifier[str] ( identifier[e] )))
identifier[self] . identifier[get_session] . identifier[rollback] ()
keyword[return] keyword[False] | def del_register_user(self, register_user):
"""
Deletes registration object from database
:param register_user: RegisterUser object to delete
"""
try:
self.get_session.delete(register_user)
self.get_session.commit()
return True # depends on [control=['try'], data=[]]
except Exception as e:
log.error(c.LOGMSG_ERR_SEC_DEL_REGISTER_USER.format(str(e)))
self.get_session.rollback()
return False # depends on [control=['except'], data=['e']] |
def _unpack_v2(h5file, stage):
'''Unpack groups from HDF5 v2 file'''
points = np.array(h5file['neuron1/%s/points' % stage])
# from documentation: The /neuron1/structure/unraveled reuses /neuron1/structure/raw
groups_stage = stage if stage != 'unraveled' else 'raw'
groups = np.array(h5file['neuron1/structure/%s' % groups_stage])
stypes = np.array(h5file['neuron1/structure/sectiontype'])
groups = np.hstack([groups, stypes])
groups[:, [1, 2]] = groups[:, [2, 1]]
return points, groups | def function[_unpack_v2, parameter[h5file, stage]]:
constant[Unpack groups from HDF5 v2 file]
variable[points] assign[=] call[name[np].array, parameter[call[name[h5file]][binary_operation[constant[neuron1/%s/points] <ast.Mod object at 0x7da2590d6920> name[stage]]]]]
variable[groups_stage] assign[=] <ast.IfExp object at 0x7da18f09f4f0>
variable[groups] assign[=] call[name[np].array, parameter[call[name[h5file]][binary_operation[constant[neuron1/structure/%s] <ast.Mod object at 0x7da2590d6920> name[groups_stage]]]]]
variable[stypes] assign[=] call[name[np].array, parameter[call[name[h5file]][constant[neuron1/structure/sectiontype]]]]
variable[groups] assign[=] call[name[np].hstack, parameter[list[[<ast.Name object at 0x7da18f09f670>, <ast.Name object at 0x7da18f09fa60>]]]]
call[name[groups]][tuple[[<ast.Slice object at 0x7da18f09d4b0>, <ast.List object at 0x7da18f09f580>]]] assign[=] call[name[groups]][tuple[[<ast.Slice object at 0x7da18bc71db0>, <ast.List object at 0x7da20e9609d0>]]]
return[tuple[[<ast.Name object at 0x7da20e963070>, <ast.Name object at 0x7da20e961720>]]] | keyword[def] identifier[_unpack_v2] ( identifier[h5file] , identifier[stage] ):
literal[string]
identifier[points] = identifier[np] . identifier[array] ( identifier[h5file] [ literal[string] % identifier[stage] ])
identifier[groups_stage] = identifier[stage] keyword[if] identifier[stage] != literal[string] keyword[else] literal[string]
identifier[groups] = identifier[np] . identifier[array] ( identifier[h5file] [ literal[string] % identifier[groups_stage] ])
identifier[stypes] = identifier[np] . identifier[array] ( identifier[h5file] [ literal[string] ])
identifier[groups] = identifier[np] . identifier[hstack] ([ identifier[groups] , identifier[stypes] ])
identifier[groups] [:,[ literal[int] , literal[int] ]]= identifier[groups] [:,[ literal[int] , literal[int] ]]
keyword[return] identifier[points] , identifier[groups] | def _unpack_v2(h5file, stage):
"""Unpack groups from HDF5 v2 file"""
points = np.array(h5file['neuron1/%s/points' % stage])
# from documentation: The /neuron1/structure/unraveled reuses /neuron1/structure/raw
groups_stage = stage if stage != 'unraveled' else 'raw'
groups = np.array(h5file['neuron1/structure/%s' % groups_stage])
stypes = np.array(h5file['neuron1/structure/sectiontype'])
groups = np.hstack([groups, stypes])
groups[:, [1, 2]] = groups[:, [2, 1]]
return (points, groups) |
def tx_is_data_script(out_script, blockchain='bitcoin', **blockchain_opts):
"""
Given a blockchain name and an output script (tx['outs'][x]['script']),
determine whether or not it is a data-bearing script---i.e. one with data for the state engine.
Return True if so
Reurn False if not
"""
if blockchain == 'bitcoin':
return btc_tx_output_script_has_data(out_script, **blockchain_opts)
else:
raise ValueError('Unknown blockchain "{}"'.format(blockchain)) | def function[tx_is_data_script, parameter[out_script, blockchain]]:
constant[
Given a blockchain name and an output script (tx['outs'][x]['script']),
determine whether or not it is a data-bearing script---i.e. one with data for the state engine.
Return True if so
Reurn False if not
]
if compare[name[blockchain] equal[==] constant[bitcoin]] begin[:]
return[call[name[btc_tx_output_script_has_data], parameter[name[out_script]]]] | keyword[def] identifier[tx_is_data_script] ( identifier[out_script] , identifier[blockchain] = literal[string] ,** identifier[blockchain_opts] ):
literal[string]
keyword[if] identifier[blockchain] == literal[string] :
keyword[return] identifier[btc_tx_output_script_has_data] ( identifier[out_script] ,** identifier[blockchain_opts] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[blockchain] )) | def tx_is_data_script(out_script, blockchain='bitcoin', **blockchain_opts):
"""
Given a blockchain name and an output script (tx['outs'][x]['script']),
determine whether or not it is a data-bearing script---i.e. one with data for the state engine.
Return True if so
Reurn False if not
"""
if blockchain == 'bitcoin':
return btc_tx_output_script_has_data(out_script, **blockchain_opts) # depends on [control=['if'], data=[]]
else:
raise ValueError('Unknown blockchain "{}"'.format(blockchain)) |
def run(self):
"""
Perform the actual VASP run.
Returns:
(subprocess.Popen) Used for monitoring.
"""
cmd = list(self.vasp_cmd)
if self.auto_gamma:
vi = VaspInput.from_directory(".")
kpts = vi["KPOINTS"]
if kpts.style == Kpoints.supported_modes.Gamma \
and tuple(kpts.kpts[0]) == (1, 1, 1):
if self.gamma_vasp_cmd is not None and which(
self.gamma_vasp_cmd[-1]):
cmd = self.gamma_vasp_cmd
elif which(cmd[-1] + ".gamma"):
cmd[-1] += ".gamma"
logger.info("Running {}".format(" ".join(cmd)))
with open(self.output_file, 'w') as f_std, \
open(self.stderr_file, "w", buffering=1) as f_err:
# use line buffering for stderr
p = subprocess.Popen(cmd, stdout=f_std, stderr=f_err)
return p | def function[run, parameter[self]]:
constant[
Perform the actual VASP run.
Returns:
(subprocess.Popen) Used for monitoring.
]
variable[cmd] assign[=] call[name[list], parameter[name[self].vasp_cmd]]
if name[self].auto_gamma begin[:]
variable[vi] assign[=] call[name[VaspInput].from_directory, parameter[constant[.]]]
variable[kpts] assign[=] call[name[vi]][constant[KPOINTS]]
if <ast.BoolOp object at 0x7da1b0576b00> begin[:]
if <ast.BoolOp object at 0x7da1b0575630> begin[:]
variable[cmd] assign[=] name[self].gamma_vasp_cmd
call[name[logger].info, parameter[call[constant[Running {}].format, parameter[call[constant[ ].join, parameter[name[cmd]]]]]]]
with call[name[open], parameter[name[self].output_file, constant[w]]] begin[:]
variable[p] assign[=] call[name[subprocess].Popen, parameter[name[cmd]]]
return[name[p]] | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[cmd] = identifier[list] ( identifier[self] . identifier[vasp_cmd] )
keyword[if] identifier[self] . identifier[auto_gamma] :
identifier[vi] = identifier[VaspInput] . identifier[from_directory] ( literal[string] )
identifier[kpts] = identifier[vi] [ literal[string] ]
keyword[if] identifier[kpts] . identifier[style] == identifier[Kpoints] . identifier[supported_modes] . identifier[Gamma] keyword[and] identifier[tuple] ( identifier[kpts] . identifier[kpts] [ literal[int] ])==( literal[int] , literal[int] , literal[int] ):
keyword[if] identifier[self] . identifier[gamma_vasp_cmd] keyword[is] keyword[not] keyword[None] keyword[and] identifier[which] (
identifier[self] . identifier[gamma_vasp_cmd] [- literal[int] ]):
identifier[cmd] = identifier[self] . identifier[gamma_vasp_cmd]
keyword[elif] identifier[which] ( identifier[cmd] [- literal[int] ]+ literal[string] ):
identifier[cmd] [- literal[int] ]+= literal[string]
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[cmd] )))
keyword[with] identifier[open] ( identifier[self] . identifier[output_file] , literal[string] ) keyword[as] identifier[f_std] , identifier[open] ( identifier[self] . identifier[stderr_file] , literal[string] , identifier[buffering] = literal[int] ) keyword[as] identifier[f_err] :
identifier[p] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] , identifier[stdout] = identifier[f_std] , identifier[stderr] = identifier[f_err] )
keyword[return] identifier[p] | def run(self):
"""
Perform the actual VASP run.
Returns:
(subprocess.Popen) Used for monitoring.
"""
cmd = list(self.vasp_cmd)
if self.auto_gamma:
vi = VaspInput.from_directory('.')
kpts = vi['KPOINTS']
if kpts.style == Kpoints.supported_modes.Gamma and tuple(kpts.kpts[0]) == (1, 1, 1):
if self.gamma_vasp_cmd is not None and which(self.gamma_vasp_cmd[-1]):
cmd = self.gamma_vasp_cmd # depends on [control=['if'], data=[]]
elif which(cmd[-1] + '.gamma'):
cmd[-1] += '.gamma' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
logger.info('Running {}'.format(' '.join(cmd)))
with open(self.output_file, 'w') as f_std, open(self.stderr_file, 'w', buffering=1) as f_err:
# use line buffering for stderr
p = subprocess.Popen(cmd, stdout=f_std, stderr=f_err) # depends on [control=['with'], data=['f_std']]
return p |
def flexifunction_read_req_encode(self, target_system, target_component, read_req_type, data_index):
'''
Reqest reading of flexifunction data
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
read_req_type : Type of flexifunction data requested (int16_t)
data_index : index into data where needed (int16_t)
'''
return MAVLink_flexifunction_read_req_message(target_system, target_component, read_req_type, data_index) | def function[flexifunction_read_req_encode, parameter[self, target_system, target_component, read_req_type, data_index]]:
constant[
Reqest reading of flexifunction data
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
read_req_type : Type of flexifunction data requested (int16_t)
data_index : index into data where needed (int16_t)
]
return[call[name[MAVLink_flexifunction_read_req_message], parameter[name[target_system], name[target_component], name[read_req_type], name[data_index]]]] | keyword[def] identifier[flexifunction_read_req_encode] ( identifier[self] , identifier[target_system] , identifier[target_component] , identifier[read_req_type] , identifier[data_index] ):
literal[string]
keyword[return] identifier[MAVLink_flexifunction_read_req_message] ( identifier[target_system] , identifier[target_component] , identifier[read_req_type] , identifier[data_index] ) | def flexifunction_read_req_encode(self, target_system, target_component, read_req_type, data_index):
"""
Reqest reading of flexifunction data
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
read_req_type : Type of flexifunction data requested (int16_t)
data_index : index into data where needed (int16_t)
"""
return MAVLink_flexifunction_read_req_message(target_system, target_component, read_req_type, data_index) |
def merkleroot(hashes):
"""
Args:
hashes: reversed binary form of transactions hashes, e.g.:
``binascii.unhexlify(h)[::-1] for h in block['tx']]``
Returns:
merkle root in hexadecimal form
"""
if len(hashes) == 1:
return binascii.hexlify(bytearray(reversed(hashes[0])))
if len(hashes) % 2 == 1:
hashes.append(hashes[-1])
parent_hashes = []
for i in range(0, len(hashes)-1, 2):
first_round_hash = hashlib.sha256(hashes[i] + hashes[i+1]).digest()
second_round_hash = hashlib.sha256(first_round_hash).digest()
parent_hashes.append(second_round_hash)
return merkleroot(parent_hashes) | def function[merkleroot, parameter[hashes]]:
constant[
Args:
hashes: reversed binary form of transactions hashes, e.g.:
``binascii.unhexlify(h)[::-1] for h in block['tx']]``
Returns:
merkle root in hexadecimal form
]
if compare[call[name[len], parameter[name[hashes]]] equal[==] constant[1]] begin[:]
return[call[name[binascii].hexlify, parameter[call[name[bytearray], parameter[call[name[reversed], parameter[call[name[hashes]][constant[0]]]]]]]]]
if compare[binary_operation[call[name[len], parameter[name[hashes]]] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[1]] begin[:]
call[name[hashes].append, parameter[call[name[hashes]][<ast.UnaryOp object at 0x7da18f723d60>]]]
variable[parent_hashes] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], binary_operation[call[name[len], parameter[name[hashes]]] - constant[1]], constant[2]]]] begin[:]
variable[first_round_hash] assign[=] call[call[name[hashlib].sha256, parameter[binary_operation[call[name[hashes]][name[i]] + call[name[hashes]][binary_operation[name[i] + constant[1]]]]]].digest, parameter[]]
variable[second_round_hash] assign[=] call[call[name[hashlib].sha256, parameter[name[first_round_hash]]].digest, parameter[]]
call[name[parent_hashes].append, parameter[name[second_round_hash]]]
return[call[name[merkleroot], parameter[name[parent_hashes]]]] | keyword[def] identifier[merkleroot] ( identifier[hashes] ):
literal[string]
keyword[if] identifier[len] ( identifier[hashes] )== literal[int] :
keyword[return] identifier[binascii] . identifier[hexlify] ( identifier[bytearray] ( identifier[reversed] ( identifier[hashes] [ literal[int] ])))
keyword[if] identifier[len] ( identifier[hashes] )% literal[int] == literal[int] :
identifier[hashes] . identifier[append] ( identifier[hashes] [- literal[int] ])
identifier[parent_hashes] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[hashes] )- literal[int] , literal[int] ):
identifier[first_round_hash] = identifier[hashlib] . identifier[sha256] ( identifier[hashes] [ identifier[i] ]+ identifier[hashes] [ identifier[i] + literal[int] ]). identifier[digest] ()
identifier[second_round_hash] = identifier[hashlib] . identifier[sha256] ( identifier[first_round_hash] ). identifier[digest] ()
identifier[parent_hashes] . identifier[append] ( identifier[second_round_hash] )
keyword[return] identifier[merkleroot] ( identifier[parent_hashes] ) | def merkleroot(hashes):
"""
Args:
hashes: reversed binary form of transactions hashes, e.g.:
``binascii.unhexlify(h)[::-1] for h in block['tx']]``
Returns:
merkle root in hexadecimal form
"""
if len(hashes) == 1:
return binascii.hexlify(bytearray(reversed(hashes[0]))) # depends on [control=['if'], data=[]]
if len(hashes) % 2 == 1:
hashes.append(hashes[-1]) # depends on [control=['if'], data=[]]
parent_hashes = []
for i in range(0, len(hashes) - 1, 2):
first_round_hash = hashlib.sha256(hashes[i] + hashes[i + 1]).digest()
second_round_hash = hashlib.sha256(first_round_hash).digest()
parent_hashes.append(second_round_hash) # depends on [control=['for'], data=['i']]
return merkleroot(parent_hashes) |
def modified(self):
"""Union[datetime.datetime, None]: Datetime at which the dataset was
last modified (:data:`None` until set from the server).
"""
modified_time = self._properties.get("lastModifiedTime")
if modified_time is not None:
# modified_time will be in milliseconds.
return google.cloud._helpers._datetime_from_microseconds(
1000.0 * float(modified_time)
) | def function[modified, parameter[self]]:
constant[Union[datetime.datetime, None]: Datetime at which the dataset was
last modified (:data:`None` until set from the server).
]
variable[modified_time] assign[=] call[name[self]._properties.get, parameter[constant[lastModifiedTime]]]
if compare[name[modified_time] is_not constant[None]] begin[:]
return[call[name[google].cloud._helpers._datetime_from_microseconds, parameter[binary_operation[constant[1000.0] * call[name[float], parameter[name[modified_time]]]]]]] | keyword[def] identifier[modified] ( identifier[self] ):
literal[string]
identifier[modified_time] = identifier[self] . identifier[_properties] . identifier[get] ( literal[string] )
keyword[if] identifier[modified_time] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[google] . identifier[cloud] . identifier[_helpers] . identifier[_datetime_from_microseconds] (
literal[int] * identifier[float] ( identifier[modified_time] )
) | def modified(self):
"""Union[datetime.datetime, None]: Datetime at which the dataset was
last modified (:data:`None` until set from the server).
"""
modified_time = self._properties.get('lastModifiedTime')
if modified_time is not None:
# modified_time will be in milliseconds.
return google.cloud._helpers._datetime_from_microseconds(1000.0 * float(modified_time)) # depends on [control=['if'], data=['modified_time']] |
def load_from_remote(remote_name, owner=None):
""" Loads the data from a remote repository.
:param remote_name: The name of the dataset in the remote repository
:param owner: (optional) The owner of the dataset. If nothing is provided, the current user
is used. For public datasets use 'public'.
:return: A new GMQLDataset or a GDataframe
"""
from .. import GMQLDataset
pmg = get_python_manager()
remote_manager = get_remote_manager()
parser = remote_manager.get_dataset_schema(remote_name, owner)
source_table = get_source_table()
id = source_table.search_source(remote=remote_name)
if id is None:
id = source_table.add_source(remote=remote_name, parser=parser)
index = pmg.read_dataset(str(id), parser.get_gmql_parser())
remote_sources = [id]
return GMQLDataset.GMQLDataset(index=index, location="remote", path_or_name=remote_name,
remote_sources=remote_sources) | def function[load_from_remote, parameter[remote_name, owner]]:
constant[ Loads the data from a remote repository.
:param remote_name: The name of the dataset in the remote repository
:param owner: (optional) The owner of the dataset. If nothing is provided, the current user
is used. For public datasets use 'public'.
:return: A new GMQLDataset or a GDataframe
]
from relative_module[None] import module[GMQLDataset]
variable[pmg] assign[=] call[name[get_python_manager], parameter[]]
variable[remote_manager] assign[=] call[name[get_remote_manager], parameter[]]
variable[parser] assign[=] call[name[remote_manager].get_dataset_schema, parameter[name[remote_name], name[owner]]]
variable[source_table] assign[=] call[name[get_source_table], parameter[]]
variable[id] assign[=] call[name[source_table].search_source, parameter[]]
if compare[name[id] is constant[None]] begin[:]
variable[id] assign[=] call[name[source_table].add_source, parameter[]]
variable[index] assign[=] call[name[pmg].read_dataset, parameter[call[name[str], parameter[name[id]]], call[name[parser].get_gmql_parser, parameter[]]]]
variable[remote_sources] assign[=] list[[<ast.Name object at 0x7da1b1bba0b0>]]
return[call[name[GMQLDataset].GMQLDataset, parameter[]]] | keyword[def] identifier[load_from_remote] ( identifier[remote_name] , identifier[owner] = keyword[None] ):
literal[string]
keyword[from] .. keyword[import] identifier[GMQLDataset]
identifier[pmg] = identifier[get_python_manager] ()
identifier[remote_manager] = identifier[get_remote_manager] ()
identifier[parser] = identifier[remote_manager] . identifier[get_dataset_schema] ( identifier[remote_name] , identifier[owner] )
identifier[source_table] = identifier[get_source_table] ()
identifier[id] = identifier[source_table] . identifier[search_source] ( identifier[remote] = identifier[remote_name] )
keyword[if] identifier[id] keyword[is] keyword[None] :
identifier[id] = identifier[source_table] . identifier[add_source] ( identifier[remote] = identifier[remote_name] , identifier[parser] = identifier[parser] )
identifier[index] = identifier[pmg] . identifier[read_dataset] ( identifier[str] ( identifier[id] ), identifier[parser] . identifier[get_gmql_parser] ())
identifier[remote_sources] =[ identifier[id] ]
keyword[return] identifier[GMQLDataset] . identifier[GMQLDataset] ( identifier[index] = identifier[index] , identifier[location] = literal[string] , identifier[path_or_name] = identifier[remote_name] ,
identifier[remote_sources] = identifier[remote_sources] ) | def load_from_remote(remote_name, owner=None):
""" Loads the data from a remote repository.
:param remote_name: The name of the dataset in the remote repository
:param owner: (optional) The owner of the dataset. If nothing is provided, the current user
is used. For public datasets use 'public'.
:return: A new GMQLDataset or a GDataframe
"""
from .. import GMQLDataset
pmg = get_python_manager()
remote_manager = get_remote_manager()
parser = remote_manager.get_dataset_schema(remote_name, owner)
source_table = get_source_table()
id = source_table.search_source(remote=remote_name)
if id is None:
id = source_table.add_source(remote=remote_name, parser=parser) # depends on [control=['if'], data=['id']]
index = pmg.read_dataset(str(id), parser.get_gmql_parser())
remote_sources = [id]
return GMQLDataset.GMQLDataset(index=index, location='remote', path_or_name=remote_name, remote_sources=remote_sources) |
def updown(self, increment):
"""
屏幕上下滚动
:params incrment: 1 向下滚动
-1 向上滚动
"""
scroll_line_num = self.screen_height - 4
# paging
if increment == -1 and self.markline == 0 and self.topline != 0:
self.topline -= 1
elif increment == 1 and self.markline + self.topline != len(self._lines) - 1 and self.markline == scroll_line_num:
self.topline += 1
# scroll
if increment == -1 and self.markline != 0:
self.markline -= 1
elif increment == 1 and self.markline != scroll_line_num and self.markline < len(self._lines) - 1:
self.markline += 1 | def function[updown, parameter[self, increment]]:
constant[
屏幕上下滚动
:params incrment: 1 向下滚动
-1 向上滚动
]
variable[scroll_line_num] assign[=] binary_operation[name[self].screen_height - constant[4]]
if <ast.BoolOp object at 0x7da1b2345e70> begin[:]
<ast.AugAssign object at 0x7da1b2344130>
if <ast.BoolOp object at 0x7da1b2344a60> begin[:]
<ast.AugAssign object at 0x7da1b2346d40> | keyword[def] identifier[updown] ( identifier[self] , identifier[increment] ):
literal[string]
identifier[scroll_line_num] = identifier[self] . identifier[screen_height] - literal[int]
keyword[if] identifier[increment] ==- literal[int] keyword[and] identifier[self] . identifier[markline] == literal[int] keyword[and] identifier[self] . identifier[topline] != literal[int] :
identifier[self] . identifier[topline] -= literal[int]
keyword[elif] identifier[increment] == literal[int] keyword[and] identifier[self] . identifier[markline] + identifier[self] . identifier[topline] != identifier[len] ( identifier[self] . identifier[_lines] )- literal[int] keyword[and] identifier[self] . identifier[markline] == identifier[scroll_line_num] :
identifier[self] . identifier[topline] += literal[int]
keyword[if] identifier[increment] ==- literal[int] keyword[and] identifier[self] . identifier[markline] != literal[int] :
identifier[self] . identifier[markline] -= literal[int]
keyword[elif] identifier[increment] == literal[int] keyword[and] identifier[self] . identifier[markline] != identifier[scroll_line_num] keyword[and] identifier[self] . identifier[markline] < identifier[len] ( identifier[self] . identifier[_lines] )- literal[int] :
identifier[self] . identifier[markline] += literal[int] | def updown(self, increment):
"""
屏幕上下滚动
:params incrment: 1 向下滚动
-1 向上滚动
"""
scroll_line_num = self.screen_height - 4
# paging
if increment == -1 and self.markline == 0 and (self.topline != 0):
self.topline -= 1 # depends on [control=['if'], data=[]]
elif increment == 1 and self.markline + self.topline != len(self._lines) - 1 and (self.markline == scroll_line_num):
self.topline += 1 # depends on [control=['if'], data=[]]
# scroll
if increment == -1 and self.markline != 0:
self.markline -= 1 # depends on [control=['if'], data=[]]
elif increment == 1 and self.markline != scroll_line_num and (self.markline < len(self._lines) - 1):
self.markline += 1 # depends on [control=['if'], data=[]] |
def reweight_run(res, logp_new, logp_old=None):
"""
Reweight a given run based on a new target distribution.
Parameters
----------
res : :class:`~dynesty.results.Results` instance
The :class:`~dynesty.results.Results` instance taken from a previous
nested sampling run.
logp_new : `~numpy.ndarray` with shape (nsamps,)
New target distribution evaluated at the location of the samples.
logp_old : `~numpy.ndarray` with shape (nsamps,)
Old target distribution evaluated at the location of the samples.
If not provided, the `logl` values from `res` will be used.
Returns
-------
new_res : :class:`~dynesty.results.Results` instance
A new :class:`~dynesty.results.Results` instance with corresponding
weights based on our reweighted samples.
"""
# Extract info.
if logp_old is None:
logp_old = res['logl']
logrwt = logp_new - logp_old # ln(reweight)
logvol = res['logvol']
logl = res['logl']
nsamps = len(logvol)
# Compute weights using quadratic estimator.
h = 0.
logz = -1.e300
loglstar = -1.e300
logzvar = 0.
logvols_pad = np.concatenate(([0.], logvol))
logdvols = misc.logsumexp(a=np.c_[logvols_pad[:-1], logvols_pad[1:]],
axis=1, b=np.c_[np.ones(nsamps),
-np.ones(nsamps)])
logdvols += math.log(0.5)
dlvs = -np.diff(np.append(0., logvol))
saved_logwt, saved_logz, saved_logzvar, saved_h = [], [], [], []
for i in range(nsamps):
loglstar_new = logl[i]
logdvol, dlv = logdvols[i], dlvs[i]
logwt = np.logaddexp(loglstar_new, loglstar) + logdvol + logrwt[i]
logz_new = np.logaddexp(logz, logwt)
try:
lzterm = (math.exp(loglstar - logz_new) * loglstar +
math.exp(loglstar_new - logz_new) * loglstar_new)
except:
lzterm = 0.
h_new = (math.exp(logdvol) * lzterm +
math.exp(logz - logz_new) * (h + logz) -
logz_new)
dh = h_new - h
h = h_new
logz = logz_new
logzvar += dh * dlv
loglstar = loglstar_new
saved_logwt.append(logwt)
saved_logz.append(logz)
saved_logzvar.append(logzvar)
saved_h.append(h)
# Copy results.
new_res = Results([item for item in res.items()])
# Overwrite items with our new estimates.
new_res.logwt = np.array(saved_logwt)
new_res.logz = np.array(saved_logz)
new_res.logzerr = np.sqrt(np.array(saved_logzvar))
new_res.h = np.array(saved_h)
return new_res | def function[reweight_run, parameter[res, logp_new, logp_old]]:
constant[
Reweight a given run based on a new target distribution.
Parameters
----------
res : :class:`~dynesty.results.Results` instance
The :class:`~dynesty.results.Results` instance taken from a previous
nested sampling run.
logp_new : `~numpy.ndarray` with shape (nsamps,)
New target distribution evaluated at the location of the samples.
logp_old : `~numpy.ndarray` with shape (nsamps,)
Old target distribution evaluated at the location of the samples.
If not provided, the `logl` values from `res` will be used.
Returns
-------
new_res : :class:`~dynesty.results.Results` instance
A new :class:`~dynesty.results.Results` instance with corresponding
weights based on our reweighted samples.
]
if compare[name[logp_old] is constant[None]] begin[:]
variable[logp_old] assign[=] call[name[res]][constant[logl]]
variable[logrwt] assign[=] binary_operation[name[logp_new] - name[logp_old]]
variable[logvol] assign[=] call[name[res]][constant[logvol]]
variable[logl] assign[=] call[name[res]][constant[logl]]
variable[nsamps] assign[=] call[name[len], parameter[name[logvol]]]
variable[h] assign[=] constant[0.0]
variable[logz] assign[=] <ast.UnaryOp object at 0x7da1b1ebadd0>
variable[loglstar] assign[=] <ast.UnaryOp object at 0x7da1b1ee9ed0>
variable[logzvar] assign[=] constant[0.0]
variable[logvols_pad] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.List object at 0x7da1b1ee9150>, <ast.Name object at 0x7da1b1ee92a0>]]]]
variable[logdvols] assign[=] call[name[misc].logsumexp, parameter[]]
<ast.AugAssign object at 0x7da1b1eea950>
variable[dlvs] assign[=] <ast.UnaryOp object at 0x7da1b1eeb0d0>
<ast.Tuple object at 0x7da1b1eeaa10> assign[=] tuple[[<ast.List object at 0x7da1b1eea440>, <ast.List object at 0x7da1b1ee9570>, <ast.List object at 0x7da1b1eeae90>, <ast.List object at 0x7da1b1eeaa40>]]
for taget[name[i]] in starred[call[name[range], parameter[name[nsamps]]]] begin[:]
variable[loglstar_new] assign[=] call[name[logl]][name[i]]
<ast.Tuple object at 0x7da1b1ee9de0> assign[=] tuple[[<ast.Subscript object at 0x7da1b1eea170>, <ast.Subscript object at 0x7da1b1eea1a0>]]
variable[logwt] assign[=] binary_operation[binary_operation[call[name[np].logaddexp, parameter[name[loglstar_new], name[loglstar]]] + name[logdvol]] + call[name[logrwt]][name[i]]]
variable[logz_new] assign[=] call[name[np].logaddexp, parameter[name[logz], name[logwt]]]
<ast.Try object at 0x7da1b1ee8e80>
variable[h_new] assign[=] binary_operation[binary_operation[binary_operation[call[name[math].exp, parameter[name[logdvol]]] * name[lzterm]] + binary_operation[call[name[math].exp, parameter[binary_operation[name[logz] - name[logz_new]]]] * binary_operation[name[h] + name[logz]]]] - name[logz_new]]
variable[dh] assign[=] binary_operation[name[h_new] - name[h]]
variable[h] assign[=] name[h_new]
variable[logz] assign[=] name[logz_new]
<ast.AugAssign object at 0x7da18bc73940>
variable[loglstar] assign[=] name[loglstar_new]
call[name[saved_logwt].append, parameter[name[logwt]]]
call[name[saved_logz].append, parameter[name[logz]]]
call[name[saved_logzvar].append, parameter[name[logzvar]]]
call[name[saved_h].append, parameter[name[h]]]
variable[new_res] assign[=] call[name[Results], parameter[<ast.ListComp object at 0x7da1b1e8d1e0>]]
name[new_res].logwt assign[=] call[name[np].array, parameter[name[saved_logwt]]]
name[new_res].logz assign[=] call[name[np].array, parameter[name[saved_logz]]]
name[new_res].logzerr assign[=] call[name[np].sqrt, parameter[call[name[np].array, parameter[name[saved_logzvar]]]]]
name[new_res].h assign[=] call[name[np].array, parameter[name[saved_h]]]
return[name[new_res]] | keyword[def] identifier[reweight_run] ( identifier[res] , identifier[logp_new] , identifier[logp_old] = keyword[None] ):
literal[string]
keyword[if] identifier[logp_old] keyword[is] keyword[None] :
identifier[logp_old] = identifier[res] [ literal[string] ]
identifier[logrwt] = identifier[logp_new] - identifier[logp_old]
identifier[logvol] = identifier[res] [ literal[string] ]
identifier[logl] = identifier[res] [ literal[string] ]
identifier[nsamps] = identifier[len] ( identifier[logvol] )
identifier[h] = literal[int]
identifier[logz] =- literal[int]
identifier[loglstar] =- literal[int]
identifier[logzvar] = literal[int]
identifier[logvols_pad] = identifier[np] . identifier[concatenate] (([ literal[int] ], identifier[logvol] ))
identifier[logdvols] = identifier[misc] . identifier[logsumexp] ( identifier[a] = identifier[np] . identifier[c_] [ identifier[logvols_pad] [:- literal[int] ], identifier[logvols_pad] [ literal[int] :]],
identifier[axis] = literal[int] , identifier[b] = identifier[np] . identifier[c_] [ identifier[np] . identifier[ones] ( identifier[nsamps] ),
- identifier[np] . identifier[ones] ( identifier[nsamps] )])
identifier[logdvols] += identifier[math] . identifier[log] ( literal[int] )
identifier[dlvs] =- identifier[np] . identifier[diff] ( identifier[np] . identifier[append] ( literal[int] , identifier[logvol] ))
identifier[saved_logwt] , identifier[saved_logz] , identifier[saved_logzvar] , identifier[saved_h] =[],[],[],[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nsamps] ):
identifier[loglstar_new] = identifier[logl] [ identifier[i] ]
identifier[logdvol] , identifier[dlv] = identifier[logdvols] [ identifier[i] ], identifier[dlvs] [ identifier[i] ]
identifier[logwt] = identifier[np] . identifier[logaddexp] ( identifier[loglstar_new] , identifier[loglstar] )+ identifier[logdvol] + identifier[logrwt] [ identifier[i] ]
identifier[logz_new] = identifier[np] . identifier[logaddexp] ( identifier[logz] , identifier[logwt] )
keyword[try] :
identifier[lzterm] =( identifier[math] . identifier[exp] ( identifier[loglstar] - identifier[logz_new] )* identifier[loglstar] +
identifier[math] . identifier[exp] ( identifier[loglstar_new] - identifier[logz_new] )* identifier[loglstar_new] )
keyword[except] :
identifier[lzterm] = literal[int]
identifier[h_new] =( identifier[math] . identifier[exp] ( identifier[logdvol] )* identifier[lzterm] +
identifier[math] . identifier[exp] ( identifier[logz] - identifier[logz_new] )*( identifier[h] + identifier[logz] )-
identifier[logz_new] )
identifier[dh] = identifier[h_new] - identifier[h]
identifier[h] = identifier[h_new]
identifier[logz] = identifier[logz_new]
identifier[logzvar] += identifier[dh] * identifier[dlv]
identifier[loglstar] = identifier[loglstar_new]
identifier[saved_logwt] . identifier[append] ( identifier[logwt] )
identifier[saved_logz] . identifier[append] ( identifier[logz] )
identifier[saved_logzvar] . identifier[append] ( identifier[logzvar] )
identifier[saved_h] . identifier[append] ( identifier[h] )
identifier[new_res] = identifier[Results] ([ identifier[item] keyword[for] identifier[item] keyword[in] identifier[res] . identifier[items] ()])
identifier[new_res] . identifier[logwt] = identifier[np] . identifier[array] ( identifier[saved_logwt] )
identifier[new_res] . identifier[logz] = identifier[np] . identifier[array] ( identifier[saved_logz] )
identifier[new_res] . identifier[logzerr] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[array] ( identifier[saved_logzvar] ))
identifier[new_res] . identifier[h] = identifier[np] . identifier[array] ( identifier[saved_h] )
keyword[return] identifier[new_res] | def reweight_run(res, logp_new, logp_old=None):
"""
Reweight a given run based on a new target distribution.
Parameters
----------
res : :class:`~dynesty.results.Results` instance
The :class:`~dynesty.results.Results` instance taken from a previous
nested sampling run.
logp_new : `~numpy.ndarray` with shape (nsamps,)
New target distribution evaluated at the location of the samples.
logp_old : `~numpy.ndarray` with shape (nsamps,)
Old target distribution evaluated at the location of the samples.
If not provided, the `logl` values from `res` will be used.
Returns
-------
new_res : :class:`~dynesty.results.Results` instance
A new :class:`~dynesty.results.Results` instance with corresponding
weights based on our reweighted samples.
"""
# Extract info.
if logp_old is None:
logp_old = res['logl'] # depends on [control=['if'], data=['logp_old']]
logrwt = logp_new - logp_old # ln(reweight)
logvol = res['logvol']
logl = res['logl']
nsamps = len(logvol)
# Compute weights using quadratic estimator.
h = 0.0
logz = -1e+300
loglstar = -1e+300
logzvar = 0.0
logvols_pad = np.concatenate(([0.0], logvol))
logdvols = misc.logsumexp(a=np.c_[logvols_pad[:-1], logvols_pad[1:]], axis=1, b=np.c_[np.ones(nsamps), -np.ones(nsamps)])
logdvols += math.log(0.5)
dlvs = -np.diff(np.append(0.0, logvol))
(saved_logwt, saved_logz, saved_logzvar, saved_h) = ([], [], [], [])
for i in range(nsamps):
loglstar_new = logl[i]
(logdvol, dlv) = (logdvols[i], dlvs[i])
logwt = np.logaddexp(loglstar_new, loglstar) + logdvol + logrwt[i]
logz_new = np.logaddexp(logz, logwt)
try:
lzterm = math.exp(loglstar - logz_new) * loglstar + math.exp(loglstar_new - logz_new) * loglstar_new # depends on [control=['try'], data=[]]
except:
lzterm = 0.0 # depends on [control=['except'], data=[]]
h_new = math.exp(logdvol) * lzterm + math.exp(logz - logz_new) * (h + logz) - logz_new
dh = h_new - h
h = h_new
logz = logz_new
logzvar += dh * dlv
loglstar = loglstar_new
saved_logwt.append(logwt)
saved_logz.append(logz)
saved_logzvar.append(logzvar)
saved_h.append(h) # depends on [control=['for'], data=['i']]
# Copy results.
new_res = Results([item for item in res.items()])
# Overwrite items with our new estimates.
new_res.logwt = np.array(saved_logwt)
new_res.logz = np.array(saved_logz)
new_res.logzerr = np.sqrt(np.array(saved_logzvar))
new_res.h = np.array(saved_h)
return new_res |
def output(self, to=None, *args, **kwargs):
'''Outputs to a stream (like a file or request)'''
for blok in self:
blok.output(to, *args, **kwargs)
return self | def function[output, parameter[self, to]]:
constant[Outputs to a stream (like a file or request)]
for taget[name[blok]] in starred[name[self]] begin[:]
call[name[blok].output, parameter[name[to], <ast.Starred object at 0x7da1b0034d90>]]
return[name[self]] | keyword[def] identifier[output] ( identifier[self] , identifier[to] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[for] identifier[blok] keyword[in] identifier[self] :
identifier[blok] . identifier[output] ( identifier[to] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[self] | def output(self, to=None, *args, **kwargs):
"""Outputs to a stream (like a file or request)"""
for blok in self:
blok.output(to, *args, **kwargs) # depends on [control=['for'], data=['blok']]
return self |
def categorize(self, name, value, criteria,
color=None, marker=None, linestyle=None):
"""Assign scenarios to a category according to specific criteria
or display the category assignment
Parameters
----------
name: str
category column name
value: str
category identifier
criteria: dict
dictionary with variables mapped to applicable checks
('up' and 'lo' for respective bounds, 'year' for years - optional)
color: str
assign a color to this category for plotting
marker: str
assign a marker to this category for plotting
linestyle: str
assign a linestyle to this category for plotting
"""
# add plotting run control
for kind, arg in [('color', color), ('marker', marker),
('linestyle', linestyle)]:
if arg:
run_control().update({kind: {name: {value: arg}}})
# find all data that matches categorization
rows = _apply_criteria(self.data, criteria,
in_range=True, return_test='all')
idx = _meta_idx(rows)
if len(idx) == 0:
logger().info("No scenarios satisfy the criteria")
return # EXIT FUNCTION
# update metadata dataframe
self._new_meta_column(name)
self.meta.loc[idx, name] = value
msg = '{} scenario{} categorized as `{}: {}`'
logger().info(msg.format(len(idx), '' if len(idx) == 1 else 's',
name, value)) | def function[categorize, parameter[self, name, value, criteria, color, marker, linestyle]]:
constant[Assign scenarios to a category according to specific criteria
or display the category assignment
Parameters
----------
name: str
category column name
value: str
category identifier
criteria: dict
dictionary with variables mapped to applicable checks
('up' and 'lo' for respective bounds, 'year' for years - optional)
color: str
assign a color to this category for plotting
marker: str
assign a marker to this category for plotting
linestyle: str
assign a linestyle to this category for plotting
]
for taget[tuple[[<ast.Name object at 0x7da207f01210>, <ast.Name object at 0x7da207f00400>]]] in starred[list[[<ast.Tuple object at 0x7da207f01c00>, <ast.Tuple object at 0x7da207f02ad0>, <ast.Tuple object at 0x7da207f01570>]]] begin[:]
if name[arg] begin[:]
call[call[name[run_control], parameter[]].update, parameter[dictionary[[<ast.Name object at 0x7da207f025c0>], [<ast.Dict object at 0x7da207f001f0>]]]]
variable[rows] assign[=] call[name[_apply_criteria], parameter[name[self].data, name[criteria]]]
variable[idx] assign[=] call[name[_meta_idx], parameter[name[rows]]]
if compare[call[name[len], parameter[name[idx]]] equal[==] constant[0]] begin[:]
call[call[name[logger], parameter[]].info, parameter[constant[No scenarios satisfy the criteria]]]
return[None]
call[name[self]._new_meta_column, parameter[name[name]]]
call[name[self].meta.loc][tuple[[<ast.Name object at 0x7da1b0f07f10>, <ast.Name object at 0x7da1b0f07e20>]]] assign[=] name[value]
variable[msg] assign[=] constant[{} scenario{} categorized as `{}: {}`]
call[call[name[logger], parameter[]].info, parameter[call[name[msg].format, parameter[call[name[len], parameter[name[idx]]], <ast.IfExp object at 0x7da1b0f074f0>, name[name], name[value]]]]] | keyword[def] identifier[categorize] ( identifier[self] , identifier[name] , identifier[value] , identifier[criteria] ,
identifier[color] = keyword[None] , identifier[marker] = keyword[None] , identifier[linestyle] = keyword[None] ):
literal[string]
keyword[for] identifier[kind] , identifier[arg] keyword[in] [( literal[string] , identifier[color] ),( literal[string] , identifier[marker] ),
( literal[string] , identifier[linestyle] )]:
keyword[if] identifier[arg] :
identifier[run_control] (). identifier[update] ({ identifier[kind] :{ identifier[name] :{ identifier[value] : identifier[arg] }}})
identifier[rows] = identifier[_apply_criteria] ( identifier[self] . identifier[data] , identifier[criteria] ,
identifier[in_range] = keyword[True] , identifier[return_test] = literal[string] )
identifier[idx] = identifier[_meta_idx] ( identifier[rows] )
keyword[if] identifier[len] ( identifier[idx] )== literal[int] :
identifier[logger] (). identifier[info] ( literal[string] )
keyword[return]
identifier[self] . identifier[_new_meta_column] ( identifier[name] )
identifier[self] . identifier[meta] . identifier[loc] [ identifier[idx] , identifier[name] ]= identifier[value]
identifier[msg] = literal[string]
identifier[logger] (). identifier[info] ( identifier[msg] . identifier[format] ( identifier[len] ( identifier[idx] ), literal[string] keyword[if] identifier[len] ( identifier[idx] )== literal[int] keyword[else] literal[string] ,
identifier[name] , identifier[value] )) | def categorize(self, name, value, criteria, color=None, marker=None, linestyle=None):
"""Assign scenarios to a category according to specific criteria
or display the category assignment
Parameters
----------
name: str
category column name
value: str
category identifier
criteria: dict
dictionary with variables mapped to applicable checks
('up' and 'lo' for respective bounds, 'year' for years - optional)
color: str
assign a color to this category for plotting
marker: str
assign a marker to this category for plotting
linestyle: str
assign a linestyle to this category for plotting
"""
# add plotting run control
for (kind, arg) in [('color', color), ('marker', marker), ('linestyle', linestyle)]:
if arg:
run_control().update({kind: {name: {value: arg}}}) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# find all data that matches categorization
rows = _apply_criteria(self.data, criteria, in_range=True, return_test='all')
idx = _meta_idx(rows)
if len(idx) == 0:
logger().info('No scenarios satisfy the criteria')
return # EXIT FUNCTION # depends on [control=['if'], data=[]]
# update metadata dataframe
self._new_meta_column(name)
self.meta.loc[idx, name] = value
msg = '{} scenario{} categorized as `{}: {}`'
logger().info(msg.format(len(idx), '' if len(idx) == 1 else 's', name, value)) |
def advance_cluster_time(self, cluster_time):
"""Update the cluster time for this session.
:Parameters:
- `cluster_time`: The
:data:`~pymongo.client_session.ClientSession.cluster_time` from
another `ClientSession` instance.
"""
if not isinstance(cluster_time, abc.Mapping):
raise TypeError(
"cluster_time must be a subclass of collections.Mapping")
if not isinstance(cluster_time.get("clusterTime"), Timestamp):
raise ValueError("Invalid cluster_time")
self._advance_cluster_time(cluster_time) | def function[advance_cluster_time, parameter[self, cluster_time]]:
constant[Update the cluster time for this session.
:Parameters:
- `cluster_time`: The
:data:`~pymongo.client_session.ClientSession.cluster_time` from
another `ClientSession` instance.
]
if <ast.UnaryOp object at 0x7da20c992140> begin[:]
<ast.Raise object at 0x7da20c990400>
if <ast.UnaryOp object at 0x7da20c992aa0> begin[:]
<ast.Raise object at 0x7da20c992a40>
call[name[self]._advance_cluster_time, parameter[name[cluster_time]]] | keyword[def] identifier[advance_cluster_time] ( identifier[self] , identifier[cluster_time] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[cluster_time] , identifier[abc] . identifier[Mapping] ):
keyword[raise] identifier[TypeError] (
literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[cluster_time] . identifier[get] ( literal[string] ), identifier[Timestamp] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[_advance_cluster_time] ( identifier[cluster_time] ) | def advance_cluster_time(self, cluster_time):
"""Update the cluster time for this session.
:Parameters:
- `cluster_time`: The
:data:`~pymongo.client_session.ClientSession.cluster_time` from
another `ClientSession` instance.
"""
if not isinstance(cluster_time, abc.Mapping):
raise TypeError('cluster_time must be a subclass of collections.Mapping') # depends on [control=['if'], data=[]]
if not isinstance(cluster_time.get('clusterTime'), Timestamp):
raise ValueError('Invalid cluster_time') # depends on [control=['if'], data=[]]
self._advance_cluster_time(cluster_time) |
def debug(self):
"""Return debug setting"""
debug = False
if os.path.isfile(os.path.join(self.tcex.args.tc_temp_path, 'DEBUG')):
debug = True
return debug | def function[debug, parameter[self]]:
constant[Return debug setting]
variable[debug] assign[=] constant[False]
if call[name[os].path.isfile, parameter[call[name[os].path.join, parameter[name[self].tcex.args.tc_temp_path, constant[DEBUG]]]]] begin[:]
variable[debug] assign[=] constant[True]
return[name[debug]] | keyword[def] identifier[debug] ( identifier[self] ):
literal[string]
identifier[debug] = keyword[False]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[tcex] . identifier[args] . identifier[tc_temp_path] , literal[string] )):
identifier[debug] = keyword[True]
keyword[return] identifier[debug] | def debug(self):
"""Return debug setting"""
debug = False
if os.path.isfile(os.path.join(self.tcex.args.tc_temp_path, 'DEBUG')):
debug = True # depends on [control=['if'], data=[]]
return debug |
def deleteInactiveDevicesByAge(self, age_days):
"""
Delete all inactive devices from the device list storage and cache that are older
then a given number of days. This also deletes the corresponding sessions, so if
a device comes active again and tries to send you an encrypted message you will
not be able to decrypt it. You are not allowed to delete inactive devices that
were inactive for less than a day. Thus, the minimum value for age_days is 1.
It is recommended to keep inactive devices for a longer period of time (e.g.
multiple months), as it reduces the chance for message loss and doesn't require a
lot of storage.
The recommended alternative to deleting inactive devices by age is to delete them
by count/quota. Look at the deleteInactiveDevicesByQuota method for that variant.
"""
if age_days < 1:
return
now = time.time()
bare_jids = yield self._storage.listJIDs()
for bare_jid in bare_jids:
devices = yield self.__loadInactiveDevices(bare_jid)
delete_devices = []
for device, timestamp in list(devices.items()):
elapsed_s = now - timestamp
elapsed_m = elapsed_s / 60
elapsed_h = elapsed_m / 60
elapsed_d = elapsed_h / 24
if elapsed_d >= age_days:
delete_devices.append(device)
if len(delete_devices) > 0:
yield self.__deleteInactiveDevices(bare_jid, delete_devices) | def function[deleteInactiveDevicesByAge, parameter[self, age_days]]:
constant[
Delete all inactive devices from the device list storage and cache that are older
then a given number of days. This also deletes the corresponding sessions, so if
a device comes active again and tries to send you an encrypted message you will
not be able to decrypt it. You are not allowed to delete inactive devices that
were inactive for less than a day. Thus, the minimum value for age_days is 1.
It is recommended to keep inactive devices for a longer period of time (e.g.
multiple months), as it reduces the chance for message loss and doesn't require a
lot of storage.
The recommended alternative to deleting inactive devices by age is to delete them
by count/quota. Look at the deleteInactiveDevicesByQuota method for that variant.
]
if compare[name[age_days] less[<] constant[1]] begin[:]
return[None]
variable[now] assign[=] call[name[time].time, parameter[]]
variable[bare_jids] assign[=] <ast.Yield object at 0x7da1b1906e90>
for taget[name[bare_jid]] in starred[name[bare_jids]] begin[:]
variable[devices] assign[=] <ast.Yield object at 0x7da1b19065c0>
variable[delete_devices] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1906d40>, <ast.Name object at 0x7da1b1904f40>]]] in starred[call[name[list], parameter[call[name[devices].items, parameter[]]]]] begin[:]
variable[elapsed_s] assign[=] binary_operation[name[now] - name[timestamp]]
variable[elapsed_m] assign[=] binary_operation[name[elapsed_s] / constant[60]]
variable[elapsed_h] assign[=] binary_operation[name[elapsed_m] / constant[60]]
variable[elapsed_d] assign[=] binary_operation[name[elapsed_h] / constant[24]]
if compare[name[elapsed_d] greater_or_equal[>=] name[age_days]] begin[:]
call[name[delete_devices].append, parameter[name[device]]]
if compare[call[name[len], parameter[name[delete_devices]]] greater[>] constant[0]] begin[:]
<ast.Yield object at 0x7da20c6c70d0> | keyword[def] identifier[deleteInactiveDevicesByAge] ( identifier[self] , identifier[age_days] ):
literal[string]
keyword[if] identifier[age_days] < literal[int] :
keyword[return]
identifier[now] = identifier[time] . identifier[time] ()
identifier[bare_jids] = keyword[yield] identifier[self] . identifier[_storage] . identifier[listJIDs] ()
keyword[for] identifier[bare_jid] keyword[in] identifier[bare_jids] :
identifier[devices] = keyword[yield] identifier[self] . identifier[__loadInactiveDevices] ( identifier[bare_jid] )
identifier[delete_devices] =[]
keyword[for] identifier[device] , identifier[timestamp] keyword[in] identifier[list] ( identifier[devices] . identifier[items] ()):
identifier[elapsed_s] = identifier[now] - identifier[timestamp]
identifier[elapsed_m] = identifier[elapsed_s] / literal[int]
identifier[elapsed_h] = identifier[elapsed_m] / literal[int]
identifier[elapsed_d] = identifier[elapsed_h] / literal[int]
keyword[if] identifier[elapsed_d] >= identifier[age_days] :
identifier[delete_devices] . identifier[append] ( identifier[device] )
keyword[if] identifier[len] ( identifier[delete_devices] )> literal[int] :
keyword[yield] identifier[self] . identifier[__deleteInactiveDevices] ( identifier[bare_jid] , identifier[delete_devices] ) | def deleteInactiveDevicesByAge(self, age_days):
"""
Delete all inactive devices from the device list storage and cache that are older
then a given number of days. This also deletes the corresponding sessions, so if
a device comes active again and tries to send you an encrypted message you will
not be able to decrypt it. You are not allowed to delete inactive devices that
were inactive for less than a day. Thus, the minimum value for age_days is 1.
It is recommended to keep inactive devices for a longer period of time (e.g.
multiple months), as it reduces the chance for message loss and doesn't require a
lot of storage.
The recommended alternative to deleting inactive devices by age is to delete them
by count/quota. Look at the deleteInactiveDevicesByQuota method for that variant.
"""
if age_days < 1:
return # depends on [control=['if'], data=[]]
now = time.time()
bare_jids = (yield self._storage.listJIDs())
for bare_jid in bare_jids:
devices = (yield self.__loadInactiveDevices(bare_jid))
delete_devices = []
for (device, timestamp) in list(devices.items()):
elapsed_s = now - timestamp
elapsed_m = elapsed_s / 60
elapsed_h = elapsed_m / 60
elapsed_d = elapsed_h / 24
if elapsed_d >= age_days:
delete_devices.append(device) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if len(delete_devices) > 0:
yield self.__deleteInactiveDevices(bare_jid, delete_devices) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bare_jid']] |
def _SI(size, K=1024, i='i'):
'''Return size as SI string.
'''
if 1 < K < size:
f = float(size)
for si in iter('KMGPTE'):
f /= K
if f < K:
return ' or %.1f %s%sB' % (f, si, i)
return '' | def function[_SI, parameter[size, K, i]]:
constant[Return size as SI string.
]
if compare[constant[1] less[<] name[K]] begin[:]
variable[f] assign[=] call[name[float], parameter[name[size]]]
for taget[name[si]] in starred[call[name[iter], parameter[constant[KMGPTE]]]] begin[:]
<ast.AugAssign object at 0x7da204960df0>
if compare[name[f] less[<] name[K]] begin[:]
return[binary_operation[constant[ or %.1f %s%sB] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204963e50>, <ast.Name object at 0x7da204960460>, <ast.Name object at 0x7da20c7c8580>]]]]
return[constant[]] | keyword[def] identifier[_SI] ( identifier[size] , identifier[K] = literal[int] , identifier[i] = literal[string] ):
literal[string]
keyword[if] literal[int] < identifier[K] < identifier[size] :
identifier[f] = identifier[float] ( identifier[size] )
keyword[for] identifier[si] keyword[in] identifier[iter] ( literal[string] ):
identifier[f] /= identifier[K]
keyword[if] identifier[f] < identifier[K] :
keyword[return] literal[string] %( identifier[f] , identifier[si] , identifier[i] )
keyword[return] literal[string] | def _SI(size, K=1024, i='i'):
"""Return size as SI string.
"""
if 1 < K < size:
f = float(size)
for si in iter('KMGPTE'):
f /= K
if f < K:
return ' or %.1f %s%sB' % (f, si, i) # depends on [control=['if'], data=['f']] # depends on [control=['for'], data=['si']] # depends on [control=['if'], data=['K']]
return '' |
def insert_before(self, target):
"""insert this widget into the targets parent before the target"""
if not target.parent:
return
target.parent.insert(target.parent.sprites.index(target), self) | def function[insert_before, parameter[self, target]]:
constant[insert this widget into the targets parent before the target]
if <ast.UnaryOp object at 0x7da204566920> begin[:]
return[None]
call[name[target].parent.insert, parameter[call[name[target].parent.sprites.index, parameter[name[target]]], name[self]]] | keyword[def] identifier[insert_before] ( identifier[self] , identifier[target] ):
literal[string]
keyword[if] keyword[not] identifier[target] . identifier[parent] :
keyword[return]
identifier[target] . identifier[parent] . identifier[insert] ( identifier[target] . identifier[parent] . identifier[sprites] . identifier[index] ( identifier[target] ), identifier[self] ) | def insert_before(self, target):
"""insert this widget into the targets parent before the target"""
if not target.parent:
return # depends on [control=['if'], data=[]]
target.parent.insert(target.parent.sprites.index(target), self) |
def begin_tag(self, name: str) -> Node:
"""Save the current index under the given name."""
# Check if we could attach tag cache to current rule_nodes scope
self.tag_cache[name] = Tag(self._stream, self._stream.index)
return True | def function[begin_tag, parameter[self, name]]:
constant[Save the current index under the given name.]
call[name[self].tag_cache][name[name]] assign[=] call[name[Tag], parameter[name[self]._stream, name[self]._stream.index]]
return[constant[True]] | keyword[def] identifier[begin_tag] ( identifier[self] , identifier[name] : identifier[str] )-> identifier[Node] :
literal[string]
identifier[self] . identifier[tag_cache] [ identifier[name] ]= identifier[Tag] ( identifier[self] . identifier[_stream] , identifier[self] . identifier[_stream] . identifier[index] )
keyword[return] keyword[True] | def begin_tag(self, name: str) -> Node:
"""Save the current index under the given name."""
# Check if we could attach tag cache to current rule_nodes scope
self.tag_cache[name] = Tag(self._stream, self._stream.index)
return True |
def keys(self):
"""D.keys() -> a set-like object providing a view on D's keys"""
return set(
row[self._keycol] for row in self._connection.execute(
"""SELECT DISTINCT {} FROM {} ORDER BY {} ASC;""".format(
self.selkeycol,
self.table,
self._keycol
)
)
) | def function[keys, parameter[self]]:
constant[D.keys() -> a set-like object providing a view on D's keys]
return[call[name[set], parameter[<ast.GeneratorExp object at 0x7da18bccaf50>]]] | keyword[def] identifier[keys] ( identifier[self] ):
literal[string]
keyword[return] identifier[set] (
identifier[row] [ identifier[self] . identifier[_keycol] ] keyword[for] identifier[row] keyword[in] identifier[self] . identifier[_connection] . identifier[execute] (
literal[string] . identifier[format] (
identifier[self] . identifier[selkeycol] ,
identifier[self] . identifier[table] ,
identifier[self] . identifier[_keycol]
)
)
) | def keys(self):
"""D.keys() -> a set-like object providing a view on D's keys"""
return set((row[self._keycol] for row in self._connection.execute('SELECT DISTINCT {} FROM {} ORDER BY {} ASC;'.format(self.selkeycol, self.table, self._keycol)))) |
def _get_data_volumes(vm_):
'''
Construct a list of optional data volumes from the cloud profile
'''
ret = []
volumes = vm_['volumes']
for key, value in six.iteritems(volumes):
# Verify the required 'disk_size' property is present in the cloud
# profile config
if 'disk_size' not in volumes[key].keys():
raise SaltCloudConfigError(
'The volume \'{0}\' is missing \'disk_size\''.format(key)
)
# Use 'HDD' if no 'disk_type' property is present in cloud profile
if 'disk_type' not in volumes[key].keys():
volumes[key]['disk_type'] = 'HDD'
# Construct volume object and assign to a list.
volume = Volume(
name=key,
size=volumes[key]['disk_size'],
disk_type=volumes[key]['disk_type'],
licence_type='OTHER'
)
# Set volume availability zone if defined in the cloud profile
if 'disk_availability_zone' in volumes[key].keys():
volume.availability_zone = volumes[key]['disk_availability_zone']
ret.append(volume)
return ret | def function[_get_data_volumes, parameter[vm_]]:
constant[
Construct a list of optional data volumes from the cloud profile
]
variable[ret] assign[=] list[[]]
variable[volumes] assign[=] call[name[vm_]][constant[volumes]]
for taget[tuple[[<ast.Name object at 0x7da1b2088a90>, <ast.Name object at 0x7da1b208b7f0>]]] in starred[call[name[six].iteritems, parameter[name[volumes]]]] begin[:]
if compare[constant[disk_size] <ast.NotIn object at 0x7da2590d7190> call[call[name[volumes]][name[key]].keys, parameter[]]] begin[:]
<ast.Raise object at 0x7da1b2088ee0>
if compare[constant[disk_type] <ast.NotIn object at 0x7da2590d7190> call[call[name[volumes]][name[key]].keys, parameter[]]] begin[:]
call[call[name[volumes]][name[key]]][constant[disk_type]] assign[=] constant[HDD]
variable[volume] assign[=] call[name[Volume], parameter[]]
if compare[constant[disk_availability_zone] in call[call[name[volumes]][name[key]].keys, parameter[]]] begin[:]
name[volume].availability_zone assign[=] call[call[name[volumes]][name[key]]][constant[disk_availability_zone]]
call[name[ret].append, parameter[name[volume]]]
return[name[ret]] | keyword[def] identifier[_get_data_volumes] ( identifier[vm_] ):
literal[string]
identifier[ret] =[]
identifier[volumes] = identifier[vm_] [ literal[string] ]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[volumes] ):
keyword[if] literal[string] keyword[not] keyword[in] identifier[volumes] [ identifier[key] ]. identifier[keys] ():
keyword[raise] identifier[SaltCloudConfigError] (
literal[string] . identifier[format] ( identifier[key] )
)
keyword[if] literal[string] keyword[not] keyword[in] identifier[volumes] [ identifier[key] ]. identifier[keys] ():
identifier[volumes] [ identifier[key] ][ literal[string] ]= literal[string]
identifier[volume] = identifier[Volume] (
identifier[name] = identifier[key] ,
identifier[size] = identifier[volumes] [ identifier[key] ][ literal[string] ],
identifier[disk_type] = identifier[volumes] [ identifier[key] ][ literal[string] ],
identifier[licence_type] = literal[string]
)
keyword[if] literal[string] keyword[in] identifier[volumes] [ identifier[key] ]. identifier[keys] ():
identifier[volume] . identifier[availability_zone] = identifier[volumes] [ identifier[key] ][ literal[string] ]
identifier[ret] . identifier[append] ( identifier[volume] )
keyword[return] identifier[ret] | def _get_data_volumes(vm_):
"""
Construct a list of optional data volumes from the cloud profile
"""
ret = []
volumes = vm_['volumes']
for (key, value) in six.iteritems(volumes):
# Verify the required 'disk_size' property is present in the cloud
# profile config
if 'disk_size' not in volumes[key].keys():
raise SaltCloudConfigError("The volume '{0}' is missing 'disk_size'".format(key)) # depends on [control=['if'], data=[]]
# Use 'HDD' if no 'disk_type' property is present in cloud profile
if 'disk_type' not in volumes[key].keys():
volumes[key]['disk_type'] = 'HDD' # depends on [control=['if'], data=[]]
# Construct volume object and assign to a list.
volume = Volume(name=key, size=volumes[key]['disk_size'], disk_type=volumes[key]['disk_type'], licence_type='OTHER')
# Set volume availability zone if defined in the cloud profile
if 'disk_availability_zone' in volumes[key].keys():
volume.availability_zone = volumes[key]['disk_availability_zone'] # depends on [control=['if'], data=[]]
ret.append(volume) # depends on [control=['for'], data=[]]
return ret |
def _addrinfo_or_none(contact_point, port):
"""
A helper function that wraps socket.getaddrinfo and returns None
when it fails to, e.g. resolve one of the hostnames. Used to address
PYTHON-895.
"""
try:
return socket.getaddrinfo(contact_point, port,
socket.AF_UNSPEC, socket.SOCK_STREAM)
except socket.gaierror:
log.debug('Could not resolve hostname "{}" '
'with port {}'.format(contact_point, port))
return None | def function[_addrinfo_or_none, parameter[contact_point, port]]:
constant[
A helper function that wraps socket.getaddrinfo and returns None
when it fails to, e.g. resolve one of the hostnames. Used to address
PYTHON-895.
]
<ast.Try object at 0x7da1b22af3d0> | keyword[def] identifier[_addrinfo_or_none] ( identifier[contact_point] , identifier[port] ):
literal[string]
keyword[try] :
keyword[return] identifier[socket] . identifier[getaddrinfo] ( identifier[contact_point] , identifier[port] ,
identifier[socket] . identifier[AF_UNSPEC] , identifier[socket] . identifier[SOCK_STREAM] )
keyword[except] identifier[socket] . identifier[gaierror] :
identifier[log] . identifier[debug] ( literal[string]
literal[string] . identifier[format] ( identifier[contact_point] , identifier[port] ))
keyword[return] keyword[None] | def _addrinfo_or_none(contact_point, port):
"""
A helper function that wraps socket.getaddrinfo and returns None
when it fails to, e.g. resolve one of the hostnames. Used to address
PYTHON-895.
"""
try:
return socket.getaddrinfo(contact_point, port, socket.AF_UNSPEC, socket.SOCK_STREAM) # depends on [control=['try'], data=[]]
except socket.gaierror:
log.debug('Could not resolve hostname "{}" with port {}'.format(contact_point, port))
return None # depends on [control=['except'], data=[]] |
def ratio(self, internal_standard=None):
"""
Calculates the ratio of all analytes to a single analyte.
Parameters
----------
internal_standard : str
The name of the analyte to divide all other analytes
by.
Returns
-------
None
"""
if 'bkgsub' not in self.stages_complete:
raise RuntimeError('Cannot calculate ratios before background subtraction.')
if internal_standard is not None:
self.internal_standard = internal_standard
self.minimal_analytes.update([internal_standard])
with self.pbar.set(total=len(self.data), desc='Ratio Calculation') as prog:
for s in self.data.values():
s.ratio(internal_standard=self.internal_standard)
prog.update()
self.stages_complete.update(['ratios'])
self.focus_stage = 'ratios'
return | def function[ratio, parameter[self, internal_standard]]:
constant[
Calculates the ratio of all analytes to a single analyte.
Parameters
----------
internal_standard : str
The name of the analyte to divide all other analytes
by.
Returns
-------
None
]
if compare[constant[bkgsub] <ast.NotIn object at 0x7da2590d7190> name[self].stages_complete] begin[:]
<ast.Raise object at 0x7da1b023e4d0>
if compare[name[internal_standard] is_not constant[None]] begin[:]
name[self].internal_standard assign[=] name[internal_standard]
call[name[self].minimal_analytes.update, parameter[list[[<ast.Name object at 0x7da1b01c7340>]]]]
with call[name[self].pbar.set, parameter[]] begin[:]
for taget[name[s]] in starred[call[name[self].data.values, parameter[]]] begin[:]
call[name[s].ratio, parameter[]]
call[name[prog].update, parameter[]]
call[name[self].stages_complete.update, parameter[list[[<ast.Constant object at 0x7da1b01c6b60>]]]]
name[self].focus_stage assign[=] constant[ratios]
return[None] | keyword[def] identifier[ratio] ( identifier[self] , identifier[internal_standard] = keyword[None] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[stages_complete] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[if] identifier[internal_standard] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[internal_standard] = identifier[internal_standard]
identifier[self] . identifier[minimal_analytes] . identifier[update] ([ identifier[internal_standard] ])
keyword[with] identifier[self] . identifier[pbar] . identifier[set] ( identifier[total] = identifier[len] ( identifier[self] . identifier[data] ), identifier[desc] = literal[string] ) keyword[as] identifier[prog] :
keyword[for] identifier[s] keyword[in] identifier[self] . identifier[data] . identifier[values] ():
identifier[s] . identifier[ratio] ( identifier[internal_standard] = identifier[self] . identifier[internal_standard] )
identifier[prog] . identifier[update] ()
identifier[self] . identifier[stages_complete] . identifier[update] ([ literal[string] ])
identifier[self] . identifier[focus_stage] = literal[string]
keyword[return] | def ratio(self, internal_standard=None):
"""
Calculates the ratio of all analytes to a single analyte.
Parameters
----------
internal_standard : str
The name of the analyte to divide all other analytes
by.
Returns
-------
None
"""
if 'bkgsub' not in self.stages_complete:
raise RuntimeError('Cannot calculate ratios before background subtraction.') # depends on [control=['if'], data=[]]
if internal_standard is not None:
self.internal_standard = internal_standard
self.minimal_analytes.update([internal_standard]) # depends on [control=['if'], data=['internal_standard']]
with self.pbar.set(total=len(self.data), desc='Ratio Calculation') as prog:
for s in self.data.values():
s.ratio(internal_standard=self.internal_standard)
prog.update() # depends on [control=['for'], data=['s']] # depends on [control=['with'], data=['prog']]
self.stages_complete.update(['ratios'])
self.focus_stage = 'ratios'
return |
def _output_dir(
self,
ext,
is_instance=False,
interpolatable=False,
autohinted=False,
is_variable=False,
):
"""Generate an output directory.
Args:
ext: extension string.
is_instance: The output is instance font or not.
interpolatable: The output is interpolatable or not.
autohinted: The output is autohinted or not.
is_variable: The output is variable font or not.
Return:
output directory string.
"""
assert not (is_variable and any([is_instance, interpolatable]))
# FIXME? Use user configurable destination folders.
if is_variable:
dir_prefix = "variable_"
elif is_instance:
dir_prefix = "instance_"
else:
dir_prefix = "master_"
dir_suffix = "_interpolatable" if interpolatable else ""
output_dir = dir_prefix + ext + dir_suffix
if autohinted:
output_dir = os.path.join("autohinted", output_dir)
return output_dir | def function[_output_dir, parameter[self, ext, is_instance, interpolatable, autohinted, is_variable]]:
constant[Generate an output directory.
Args:
ext: extension string.
is_instance: The output is instance font or not.
interpolatable: The output is interpolatable or not.
autohinted: The output is autohinted or not.
is_variable: The output is variable font or not.
Return:
output directory string.
]
assert[<ast.UnaryOp object at 0x7da20c6aa110>]
if name[is_variable] begin[:]
variable[dir_prefix] assign[=] constant[variable_]
variable[dir_suffix] assign[=] <ast.IfExp object at 0x7da20c6a9090>
variable[output_dir] assign[=] binary_operation[binary_operation[name[dir_prefix] + name[ext]] + name[dir_suffix]]
if name[autohinted] begin[:]
variable[output_dir] assign[=] call[name[os].path.join, parameter[constant[autohinted], name[output_dir]]]
return[name[output_dir]] | keyword[def] identifier[_output_dir] (
identifier[self] ,
identifier[ext] ,
identifier[is_instance] = keyword[False] ,
identifier[interpolatable] = keyword[False] ,
identifier[autohinted] = keyword[False] ,
identifier[is_variable] = keyword[False] ,
):
literal[string]
keyword[assert] keyword[not] ( identifier[is_variable] keyword[and] identifier[any] ([ identifier[is_instance] , identifier[interpolatable] ]))
keyword[if] identifier[is_variable] :
identifier[dir_prefix] = literal[string]
keyword[elif] identifier[is_instance] :
identifier[dir_prefix] = literal[string]
keyword[else] :
identifier[dir_prefix] = literal[string]
identifier[dir_suffix] = literal[string] keyword[if] identifier[interpolatable] keyword[else] literal[string]
identifier[output_dir] = identifier[dir_prefix] + identifier[ext] + identifier[dir_suffix]
keyword[if] identifier[autohinted] :
identifier[output_dir] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[output_dir] )
keyword[return] identifier[output_dir] | def _output_dir(self, ext, is_instance=False, interpolatable=False, autohinted=False, is_variable=False):
"""Generate an output directory.
Args:
ext: extension string.
is_instance: The output is instance font or not.
interpolatable: The output is interpolatable or not.
autohinted: The output is autohinted or not.
is_variable: The output is variable font or not.
Return:
output directory string.
"""
assert not (is_variable and any([is_instance, interpolatable]))
# FIXME? Use user configurable destination folders.
if is_variable:
dir_prefix = 'variable_' # depends on [control=['if'], data=[]]
elif is_instance:
dir_prefix = 'instance_' # depends on [control=['if'], data=[]]
else:
dir_prefix = 'master_'
dir_suffix = '_interpolatable' if interpolatable else ''
output_dir = dir_prefix + ext + dir_suffix
if autohinted:
output_dir = os.path.join('autohinted', output_dir) # depends on [control=['if'], data=[]]
return output_dir |
def execute(self, command, path=None):
"""Execute command with os.popen and return output."""
logger = logging.getLogger(__name__)
self.check_executable()
logger.debug("Executing command `%s` (cwd: %s)" % (command, path))
process = subprocess.Popen(
command,
shell=True,
cwd=path,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
stdout, stderr = process.communicate()
exit_code = process.wait()
if stdout:
logger.info(stdout.decode("utf-8"))
if stderr:
if exit_code != 0:
logger.error(stderr.decode("utf-8"))
else:
logger.info(stderr.decode("utf-8"))
return process | def function[execute, parameter[self, command, path]]:
constant[Execute command with os.popen and return output.]
variable[logger] assign[=] call[name[logging].getLogger, parameter[name[__name__]]]
call[name[self].check_executable, parameter[]]
call[name[logger].debug, parameter[binary_operation[constant[Executing command `%s` (cwd: %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b026fdf0>, <ast.Name object at 0x7da1b026fbe0>]]]]]
variable[process] assign[=] call[name[subprocess].Popen, parameter[name[command]]]
<ast.Tuple object at 0x7da1b0371240> assign[=] call[name[process].communicate, parameter[]]
variable[exit_code] assign[=] call[name[process].wait, parameter[]]
if name[stdout] begin[:]
call[name[logger].info, parameter[call[name[stdout].decode, parameter[constant[utf-8]]]]]
if name[stderr] begin[:]
if compare[name[exit_code] not_equal[!=] constant[0]] begin[:]
call[name[logger].error, parameter[call[name[stderr].decode, parameter[constant[utf-8]]]]]
return[name[process]] | keyword[def] identifier[execute] ( identifier[self] , identifier[command] , identifier[path] = keyword[None] ):
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
identifier[self] . identifier[check_executable] ()
identifier[logger] . identifier[debug] ( literal[string] %( identifier[command] , identifier[path] ))
identifier[process] = identifier[subprocess] . identifier[Popen] (
identifier[command] ,
identifier[shell] = keyword[True] ,
identifier[cwd] = identifier[path] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE]
)
identifier[stdout] , identifier[stderr] = identifier[process] . identifier[communicate] ()
identifier[exit_code] = identifier[process] . identifier[wait] ()
keyword[if] identifier[stdout] :
identifier[logger] . identifier[info] ( identifier[stdout] . identifier[decode] ( literal[string] ))
keyword[if] identifier[stderr] :
keyword[if] identifier[exit_code] != literal[int] :
identifier[logger] . identifier[error] ( identifier[stderr] . identifier[decode] ( literal[string] ))
keyword[else] :
identifier[logger] . identifier[info] ( identifier[stderr] . identifier[decode] ( literal[string] ))
keyword[return] identifier[process] | def execute(self, command, path=None):
"""Execute command with os.popen and return output."""
logger = logging.getLogger(__name__)
self.check_executable()
logger.debug('Executing command `%s` (cwd: %s)' % (command, path))
process = subprocess.Popen(command, shell=True, cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = process.communicate()
exit_code = process.wait()
if stdout:
logger.info(stdout.decode('utf-8')) # depends on [control=['if'], data=[]]
if stderr:
if exit_code != 0:
logger.error(stderr.decode('utf-8')) # depends on [control=['if'], data=[]]
else:
logger.info(stderr.decode('utf-8')) # depends on [control=['if'], data=[]]
return process |
def find_vm(self, name):
"""
Try and find a VM by name
:param name: Name of the VM
:type name: str
"""
try:
domain = self.hyper.lookupByName(name)
VM = VirtualMachine(domain, self)
except libvirtError:
VM = None
return VM | def function[find_vm, parameter[self, name]]:
constant[
Try and find a VM by name
:param name: Name of the VM
:type name: str
]
<ast.Try object at 0x7da1b27ea1d0>
return[name[VM]] | keyword[def] identifier[find_vm] ( identifier[self] , identifier[name] ):
literal[string]
keyword[try] :
identifier[domain] = identifier[self] . identifier[hyper] . identifier[lookupByName] ( identifier[name] )
identifier[VM] = identifier[VirtualMachine] ( identifier[domain] , identifier[self] )
keyword[except] identifier[libvirtError] :
identifier[VM] = keyword[None]
keyword[return] identifier[VM] | def find_vm(self, name):
"""
Try and find a VM by name
:param name: Name of the VM
:type name: str
"""
try:
domain = self.hyper.lookupByName(name)
VM = VirtualMachine(domain, self) # depends on [control=['try'], data=[]]
except libvirtError:
VM = None # depends on [control=['except'], data=[]]
return VM |
def get_psf_pix(self, ra, dec):
"""
Determine the local psf (a,b,pa) at a given sky location.
The psf is in pixel coordinates.
Parameters
----------
ra, dec : float
The sky position (degrees).
Returns
-------
a, b, pa : float
The psf semi-major axis (pixels), semi-minor axis (pixels), and rotation angle (degrees).
If a psf is defined then it is the psf that is returned, otherwise the image
restoring beam is returned.
"""
psf_sky = self.get_psf_sky(ra, dec)
psf_pix = self.wcshelper.sky2pix_ellipse([ra, dec], psf_sky[0], psf_sky[1], psf_sky[2])[2:]
return psf_pix | def function[get_psf_pix, parameter[self, ra, dec]]:
constant[
Determine the local psf (a,b,pa) at a given sky location.
The psf is in pixel coordinates.
Parameters
----------
ra, dec : float
The sky position (degrees).
Returns
-------
a, b, pa : float
The psf semi-major axis (pixels), semi-minor axis (pixels), and rotation angle (degrees).
If a psf is defined then it is the psf that is returned, otherwise the image
restoring beam is returned.
]
variable[psf_sky] assign[=] call[name[self].get_psf_sky, parameter[name[ra], name[dec]]]
variable[psf_pix] assign[=] call[call[name[self].wcshelper.sky2pix_ellipse, parameter[list[[<ast.Name object at 0x7da20c7cb1f0>, <ast.Name object at 0x7da20c7c9a50>]], call[name[psf_sky]][constant[0]], call[name[psf_sky]][constant[1]], call[name[psf_sky]][constant[2]]]]][<ast.Slice object at 0x7da1b2346bf0>]
return[name[psf_pix]] | keyword[def] identifier[get_psf_pix] ( identifier[self] , identifier[ra] , identifier[dec] ):
literal[string]
identifier[psf_sky] = identifier[self] . identifier[get_psf_sky] ( identifier[ra] , identifier[dec] )
identifier[psf_pix] = identifier[self] . identifier[wcshelper] . identifier[sky2pix_ellipse] ([ identifier[ra] , identifier[dec] ], identifier[psf_sky] [ literal[int] ], identifier[psf_sky] [ literal[int] ], identifier[psf_sky] [ literal[int] ])[ literal[int] :]
keyword[return] identifier[psf_pix] | def get_psf_pix(self, ra, dec):
"""
Determine the local psf (a,b,pa) at a given sky location.
The psf is in pixel coordinates.
Parameters
----------
ra, dec : float
The sky position (degrees).
Returns
-------
a, b, pa : float
The psf semi-major axis (pixels), semi-minor axis (pixels), and rotation angle (degrees).
If a psf is defined then it is the psf that is returned, otherwise the image
restoring beam is returned.
"""
psf_sky = self.get_psf_sky(ra, dec)
psf_pix = self.wcshelper.sky2pix_ellipse([ra, dec], psf_sky[0], psf_sky[1], psf_sky[2])[2:]
return psf_pix |
def const_shuffle(arr, seed=23980):
""" Shuffle an array in-place with a fixed seed.
"""
old_seed = np.random.seed()
np.random.seed(seed)
np.random.shuffle(arr)
np.random.seed(old_seed) | def function[const_shuffle, parameter[arr, seed]]:
constant[ Shuffle an array in-place with a fixed seed.
]
variable[old_seed] assign[=] call[name[np].random.seed, parameter[]]
call[name[np].random.seed, parameter[name[seed]]]
call[name[np].random.shuffle, parameter[name[arr]]]
call[name[np].random.seed, parameter[name[old_seed]]] | keyword[def] identifier[const_shuffle] ( identifier[arr] , identifier[seed] = literal[int] ):
literal[string]
identifier[old_seed] = identifier[np] . identifier[random] . identifier[seed] ()
identifier[np] . identifier[random] . identifier[seed] ( identifier[seed] )
identifier[np] . identifier[random] . identifier[shuffle] ( identifier[arr] )
identifier[np] . identifier[random] . identifier[seed] ( identifier[old_seed] ) | def const_shuffle(arr, seed=23980):
""" Shuffle an array in-place with a fixed seed.
"""
old_seed = np.random.seed()
np.random.seed(seed)
np.random.shuffle(arr)
np.random.seed(old_seed) |
def build_docker_run_command(configuration):
"""
Translate a declarative docker `configuration` to a `docker run` command.
Parameters
----------
configuration : dict
configuration
Returns
-------
args : list
sequence of command line arguments to run a command in a container
"""
parts = configuration.pop('docker').split()
parts.append('run')
run = configuration.pop('run')
# Ensure all env-files have proper paths
if 'env-file' in run:
run['env-file'] = [os.path.join(configuration['workspace'], env_file)
for env_file in run['env-file']]
parts.extend(build_parameter_parts(
run, 'user', 'workdir', 'rm', 'interactive', 'tty', 'env-file', 'cpu-shares', 'name',
'network', 'label', 'memory', 'entrypoint', 'runtime', 'privileged', 'group-add'
))
# Add the mounts
# The following code requires docker >= 17.06
'''for mount in run.pop('mount', []):
if mount['type'] == 'bind':
mount['source'] = os.path.join(
configuration['workspace'], mount['source'])
parts.extend(['--mount', ",".join(["%s=%s" % item for item in mount.items()])])'''
# Add the mounts
for mount in run.pop('mount', []):
if mount['type'] == 'tmpfs':
raise RuntimeError('tmpfs-mounts are currently not supported via the mount ' +
'directive in docker_interface. Consider using the tmpfs ' +
'directive instead.')
if mount['type'] == 'bind':
mount['source'] = os.path.abspath(
os.path.join(configuration['workspace'], mount['source']))
vol_config = '--volume=%s:%s' % (mount['source'], mount['destination'])
if 'readonly' in mount and mount['readonly']:
vol_config += ':ro'
parts.append(vol_config)
# Set or forward environment variables
for key, value in run.pop('env', {}).items():
if value is None:
parts.append('--env=%s' % key)
else:
parts.append('--env=%s=%s' % (key, value))
parts.append('--env=DOCKER_INTERFACE=true')
# Forward ports
for publish in run.pop('publish', []):
parts.append('--publish=%s:%s:%s' % tuple([
publish.get(key, '') for key in "ip host container".split()]))
# Add temporary file systems
for tmpfs in run.pop('tmpfs', []):
destination = tmpfs['destination']
options = tmpfs.pop('options', [])
for key in ['mode', 'size']:
if key in tmpfs:
options.append('%s=%s' % (key, tmpfs[key]))
if options:
destination = "%s:%s" % (destination, ",".join(options))
parts.extend(['--tmpfs', destination])
parts.append(run.pop('image'))
parts.extend(run.pop('cmd', []))
return parts | def function[build_docker_run_command, parameter[configuration]]:
constant[
Translate a declarative docker `configuration` to a `docker run` command.
Parameters
----------
configuration : dict
configuration
Returns
-------
args : list
sequence of command line arguments to run a command in a container
]
variable[parts] assign[=] call[call[name[configuration].pop, parameter[constant[docker]]].split, parameter[]]
call[name[parts].append, parameter[constant[run]]]
variable[run] assign[=] call[name[configuration].pop, parameter[constant[run]]]
if compare[constant[env-file] in name[run]] begin[:]
call[name[run]][constant[env-file]] assign[=] <ast.ListComp object at 0x7da20eb28f70>
call[name[parts].extend, parameter[call[name[build_parameter_parts], parameter[name[run], constant[user], constant[workdir], constant[rm], constant[interactive], constant[tty], constant[env-file], constant[cpu-shares], constant[name], constant[network], constant[label], constant[memory], constant[entrypoint], constant[runtime], constant[privileged], constant[group-add]]]]]
constant[for mount in run.pop('mount', []):
if mount['type'] == 'bind':
mount['source'] = os.path.join(
configuration['workspace'], mount['source'])
parts.extend(['--mount', ",".join(["%s=%s" % item for item in mount.items()])])]
for taget[name[mount]] in starred[call[name[run].pop, parameter[constant[mount], list[[]]]]] begin[:]
if compare[call[name[mount]][constant[type]] equal[==] constant[tmpfs]] begin[:]
<ast.Raise object at 0x7da20e9b1840>
if compare[call[name[mount]][constant[type]] equal[==] constant[bind]] begin[:]
call[name[mount]][constant[source]] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.join, parameter[call[name[configuration]][constant[workspace]], call[name[mount]][constant[source]]]]]]
variable[vol_config] assign[=] binary_operation[constant[--volume=%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da20e9b0970>, <ast.Subscript object at 0x7da20e9b33d0>]]]
if <ast.BoolOp object at 0x7da20e9b3f10> begin[:]
<ast.AugAssign object at 0x7da20e9b2b90>
call[name[parts].append, parameter[name[vol_config]]]
for taget[tuple[[<ast.Name object at 0x7da20e9b19c0>, <ast.Name object at 0x7da20e9b3460>]]] in starred[call[call[name[run].pop, parameter[constant[env], dictionary[[], []]]].items, parameter[]]] begin[:]
if compare[name[value] is constant[None]] begin[:]
call[name[parts].append, parameter[binary_operation[constant[--env=%s] <ast.Mod object at 0x7da2590d6920> name[key]]]]
call[name[parts].append, parameter[constant[--env=DOCKER_INTERFACE=true]]]
for taget[name[publish]] in starred[call[name[run].pop, parameter[constant[publish], list[[]]]]] begin[:]
call[name[parts].append, parameter[binary_operation[constant[--publish=%s:%s:%s] <ast.Mod object at 0x7da2590d6920> call[name[tuple], parameter[<ast.ListComp object at 0x7da18eb57a00>]]]]]
for taget[name[tmpfs]] in starred[call[name[run].pop, parameter[constant[tmpfs], list[[]]]]] begin[:]
variable[destination] assign[=] call[name[tmpfs]][constant[destination]]
variable[options] assign[=] call[name[tmpfs].pop, parameter[constant[options], list[[]]]]
for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da18eb56ce0>, <ast.Constant object at 0x7da18eb56620>]]] begin[:]
if compare[name[key] in name[tmpfs]] begin[:]
call[name[options].append, parameter[binary_operation[constant[%s=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb55870>, <ast.Subscript object at 0x7da18eb56140>]]]]]
if name[options] begin[:]
variable[destination] assign[=] binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb56f20>, <ast.Call object at 0x7da18eb555a0>]]]
call[name[parts].extend, parameter[list[[<ast.Constant object at 0x7da18eb564a0>, <ast.Name object at 0x7da18eb55360>]]]]
call[name[parts].append, parameter[call[name[run].pop, parameter[constant[image]]]]]
call[name[parts].extend, parameter[call[name[run].pop, parameter[constant[cmd], list[[]]]]]]
return[name[parts]] | keyword[def] identifier[build_docker_run_command] ( identifier[configuration] ):
literal[string]
identifier[parts] = identifier[configuration] . identifier[pop] ( literal[string] ). identifier[split] ()
identifier[parts] . identifier[append] ( literal[string] )
identifier[run] = identifier[configuration] . identifier[pop] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[run] :
identifier[run] [ literal[string] ]=[ identifier[os] . identifier[path] . identifier[join] ( identifier[configuration] [ literal[string] ], identifier[env_file] )
keyword[for] identifier[env_file] keyword[in] identifier[run] [ literal[string] ]]
identifier[parts] . identifier[extend] ( identifier[build_parameter_parts] (
identifier[run] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string]
))
literal[string]
keyword[for] identifier[mount] keyword[in] identifier[run] . identifier[pop] ( literal[string] ,[]):
keyword[if] identifier[mount] [ literal[string] ]== literal[string] :
keyword[raise] identifier[RuntimeError] ( literal[string] +
literal[string] +
literal[string] )
keyword[if] identifier[mount] [ literal[string] ]== literal[string] :
identifier[mount] [ literal[string] ]= identifier[os] . identifier[path] . identifier[abspath] (
identifier[os] . identifier[path] . identifier[join] ( identifier[configuration] [ literal[string] ], identifier[mount] [ literal[string] ]))
identifier[vol_config] = literal[string] %( identifier[mount] [ literal[string] ], identifier[mount] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[mount] keyword[and] identifier[mount] [ literal[string] ]:
identifier[vol_config] += literal[string]
identifier[parts] . identifier[append] ( identifier[vol_config] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[run] . identifier[pop] ( literal[string] ,{}). identifier[items] ():
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[parts] . identifier[append] ( literal[string] % identifier[key] )
keyword[else] :
identifier[parts] . identifier[append] ( literal[string] %( identifier[key] , identifier[value] ))
identifier[parts] . identifier[append] ( literal[string] )
keyword[for] identifier[publish] keyword[in] identifier[run] . identifier[pop] ( literal[string] ,[]):
identifier[parts] . identifier[append] ( literal[string] % identifier[tuple] ([
identifier[publish] . identifier[get] ( identifier[key] , literal[string] ) keyword[for] identifier[key] keyword[in] literal[string] . identifier[split] ()]))
keyword[for] identifier[tmpfs] keyword[in] identifier[run] . identifier[pop] ( literal[string] ,[]):
identifier[destination] = identifier[tmpfs] [ literal[string] ]
identifier[options] = identifier[tmpfs] . identifier[pop] ( literal[string] ,[])
keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[key] keyword[in] identifier[tmpfs] :
identifier[options] . identifier[append] ( literal[string] %( identifier[key] , identifier[tmpfs] [ identifier[key] ]))
keyword[if] identifier[options] :
identifier[destination] = literal[string] %( identifier[destination] , literal[string] . identifier[join] ( identifier[options] ))
identifier[parts] . identifier[extend] ([ literal[string] , identifier[destination] ])
identifier[parts] . identifier[append] ( identifier[run] . identifier[pop] ( literal[string] ))
identifier[parts] . identifier[extend] ( identifier[run] . identifier[pop] ( literal[string] ,[]))
keyword[return] identifier[parts] | def build_docker_run_command(configuration):
"""
Translate a declarative docker `configuration` to a `docker run` command.
Parameters
----------
configuration : dict
configuration
Returns
-------
args : list
sequence of command line arguments to run a command in a container
"""
parts = configuration.pop('docker').split()
parts.append('run')
run = configuration.pop('run')
# Ensure all env-files have proper paths
if 'env-file' in run:
run['env-file'] = [os.path.join(configuration['workspace'], env_file) for env_file in run['env-file']] # depends on [control=['if'], data=['run']]
parts.extend(build_parameter_parts(run, 'user', 'workdir', 'rm', 'interactive', 'tty', 'env-file', 'cpu-shares', 'name', 'network', 'label', 'memory', 'entrypoint', 'runtime', 'privileged', 'group-add'))
# Add the mounts
# The following code requires docker >= 17.06
'for mount in run.pop(\'mount\', []):\n if mount[\'type\'] == \'bind\':\n mount[\'source\'] = os.path.join(\n configuration[\'workspace\'], mount[\'source\'])\n parts.extend([\'--mount\', ",".join(["%s=%s" % item for item in mount.items()])])'
# Add the mounts
for mount in run.pop('mount', []):
if mount['type'] == 'tmpfs':
raise RuntimeError('tmpfs-mounts are currently not supported via the mount ' + 'directive in docker_interface. Consider using the tmpfs ' + 'directive instead.') # depends on [control=['if'], data=[]]
if mount['type'] == 'bind':
mount['source'] = os.path.abspath(os.path.join(configuration['workspace'], mount['source'])) # depends on [control=['if'], data=[]]
vol_config = '--volume=%s:%s' % (mount['source'], mount['destination'])
if 'readonly' in mount and mount['readonly']:
vol_config += ':ro' # depends on [control=['if'], data=[]]
parts.append(vol_config) # depends on [control=['for'], data=['mount']]
# Set or forward environment variables
for (key, value) in run.pop('env', {}).items():
if value is None:
parts.append('--env=%s' % key) # depends on [control=['if'], data=[]]
else:
parts.append('--env=%s=%s' % (key, value)) # depends on [control=['for'], data=[]]
parts.append('--env=DOCKER_INTERFACE=true')
# Forward ports
for publish in run.pop('publish', []):
parts.append('--publish=%s:%s:%s' % tuple([publish.get(key, '') for key in 'ip host container'.split()])) # depends on [control=['for'], data=['publish']]
# Add temporary file systems
for tmpfs in run.pop('tmpfs', []):
destination = tmpfs['destination']
options = tmpfs.pop('options', [])
for key in ['mode', 'size']:
if key in tmpfs:
options.append('%s=%s' % (key, tmpfs[key])) # depends on [control=['if'], data=['key', 'tmpfs']] # depends on [control=['for'], data=['key']]
if options:
destination = '%s:%s' % (destination, ','.join(options)) # depends on [control=['if'], data=[]]
parts.extend(['--tmpfs', destination]) # depends on [control=['for'], data=['tmpfs']]
parts.append(run.pop('image'))
parts.extend(run.pop('cmd', []))
return parts |
def get_identities(self, item):
"""Return the identities from an item"""
item = item['data']
# Changeset owner
user = item['owner']
identity = self.get_sh_identity(user)
yield identity
# Patchset uploader and author
if 'patchSets' in item:
for patchset in item['patchSets']:
user = patchset['uploader']
identity = self.get_sh_identity(user)
yield identity
if 'author' in patchset:
user = patchset['author']
identity = self.get_sh_identity(user)
yield identity
if 'approvals' in patchset:
# Approvals by
for approval in patchset['approvals']:
user = approval['by']
identity = self.get_sh_identity(user)
yield identity
# Comments reviewers
if 'comments' in item:
for comment in item['comments']:
user = comment['reviewer']
identity = self.get_sh_identity(user)
yield identity | def function[get_identities, parameter[self, item]]:
constant[Return the identities from an item]
variable[item] assign[=] call[name[item]][constant[data]]
variable[user] assign[=] call[name[item]][constant[owner]]
variable[identity] assign[=] call[name[self].get_sh_identity, parameter[name[user]]]
<ast.Yield object at 0x7da1b0f2b1f0>
if compare[constant[patchSets] in name[item]] begin[:]
for taget[name[patchset]] in starred[call[name[item]][constant[patchSets]]] begin[:]
variable[user] assign[=] call[name[patchset]][constant[uploader]]
variable[identity] assign[=] call[name[self].get_sh_identity, parameter[name[user]]]
<ast.Yield object at 0x7da1b0f2b8b0>
if compare[constant[author] in name[patchset]] begin[:]
variable[user] assign[=] call[name[patchset]][constant[author]]
variable[identity] assign[=] call[name[self].get_sh_identity, parameter[name[user]]]
<ast.Yield object at 0x7da1b0f2bfd0>
if compare[constant[approvals] in name[patchset]] begin[:]
for taget[name[approval]] in starred[call[name[patchset]][constant[approvals]]] begin[:]
variable[user] assign[=] call[name[approval]][constant[by]]
variable[identity] assign[=] call[name[self].get_sh_identity, parameter[name[user]]]
<ast.Yield object at 0x7da18dc99f30>
if compare[constant[comments] in name[item]] begin[:]
for taget[name[comment]] in starred[call[name[item]][constant[comments]]] begin[:]
variable[user] assign[=] call[name[comment]][constant[reviewer]]
variable[identity] assign[=] call[name[self].get_sh_identity, parameter[name[user]]]
<ast.Yield object at 0x7da18dc9b760> | keyword[def] identifier[get_identities] ( identifier[self] , identifier[item] ):
literal[string]
identifier[item] = identifier[item] [ literal[string] ]
identifier[user] = identifier[item] [ literal[string] ]
identifier[identity] = identifier[self] . identifier[get_sh_identity] ( identifier[user] )
keyword[yield] identifier[identity]
keyword[if] literal[string] keyword[in] identifier[item] :
keyword[for] identifier[patchset] keyword[in] identifier[item] [ literal[string] ]:
identifier[user] = identifier[patchset] [ literal[string] ]
identifier[identity] = identifier[self] . identifier[get_sh_identity] ( identifier[user] )
keyword[yield] identifier[identity]
keyword[if] literal[string] keyword[in] identifier[patchset] :
identifier[user] = identifier[patchset] [ literal[string] ]
identifier[identity] = identifier[self] . identifier[get_sh_identity] ( identifier[user] )
keyword[yield] identifier[identity]
keyword[if] literal[string] keyword[in] identifier[patchset] :
keyword[for] identifier[approval] keyword[in] identifier[patchset] [ literal[string] ]:
identifier[user] = identifier[approval] [ literal[string] ]
identifier[identity] = identifier[self] . identifier[get_sh_identity] ( identifier[user] )
keyword[yield] identifier[identity]
keyword[if] literal[string] keyword[in] identifier[item] :
keyword[for] identifier[comment] keyword[in] identifier[item] [ literal[string] ]:
identifier[user] = identifier[comment] [ literal[string] ]
identifier[identity] = identifier[self] . identifier[get_sh_identity] ( identifier[user] )
keyword[yield] identifier[identity] | def get_identities(self, item):
"""Return the identities from an item"""
item = item['data']
# Changeset owner
user = item['owner']
identity = self.get_sh_identity(user)
yield identity
# Patchset uploader and author
if 'patchSets' in item:
for patchset in item['patchSets']:
user = patchset['uploader']
identity = self.get_sh_identity(user)
yield identity
if 'author' in patchset:
user = patchset['author']
identity = self.get_sh_identity(user)
yield identity # depends on [control=['if'], data=['patchset']]
if 'approvals' in patchset:
# Approvals by
for approval in patchset['approvals']:
user = approval['by']
identity = self.get_sh_identity(user)
yield identity # depends on [control=['for'], data=['approval']] # depends on [control=['if'], data=['patchset']] # depends on [control=['for'], data=['patchset']] # depends on [control=['if'], data=['item']]
# Comments reviewers
if 'comments' in item:
for comment in item['comments']:
user = comment['reviewer']
identity = self.get_sh_identity(user)
yield identity # depends on [control=['for'], data=['comment']] # depends on [control=['if'], data=['item']] |
def get(self):
"""API endpoint to retrieve a list of links to transaction
outputs.
Returns:
A :obj:`list` of :cls:`str` of links to outputs.
"""
parser = reqparse.RequestParser()
parser.add_argument('public_key', type=parameters.valid_ed25519,
required=True)
parser.add_argument('spent', type=parameters.valid_bool)
args = parser.parse_args(strict=True)
pool = current_app.config['bigchain_pool']
with pool() as bigchain:
outputs = bigchain.get_outputs_filtered(args['public_key'],
args['spent'])
return [{'transaction_id': output.txid, 'output_index': output.output}
for output in outputs] | def function[get, parameter[self]]:
constant[API endpoint to retrieve a list of links to transaction
outputs.
Returns:
A :obj:`list` of :cls:`str` of links to outputs.
]
variable[parser] assign[=] call[name[reqparse].RequestParser, parameter[]]
call[name[parser].add_argument, parameter[constant[public_key]]]
call[name[parser].add_argument, parameter[constant[spent]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
variable[pool] assign[=] call[name[current_app].config][constant[bigchain_pool]]
with call[name[pool], parameter[]] begin[:]
variable[outputs] assign[=] call[name[bigchain].get_outputs_filtered, parameter[call[name[args]][constant[public_key]], call[name[args]][constant[spent]]]]
return[<ast.ListComp object at 0x7da1b1bbadd0>] | keyword[def] identifier[get] ( identifier[self] ):
literal[string]
identifier[parser] = identifier[reqparse] . identifier[RequestParser] ()
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[parameters] . identifier[valid_ed25519] ,
identifier[required] = keyword[True] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[parameters] . identifier[valid_bool] )
identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[strict] = keyword[True] )
identifier[pool] = identifier[current_app] . identifier[config] [ literal[string] ]
keyword[with] identifier[pool] () keyword[as] identifier[bigchain] :
identifier[outputs] = identifier[bigchain] . identifier[get_outputs_filtered] ( identifier[args] [ literal[string] ],
identifier[args] [ literal[string] ])
keyword[return] [{ literal[string] : identifier[output] . identifier[txid] , literal[string] : identifier[output] . identifier[output] }
keyword[for] identifier[output] keyword[in] identifier[outputs] ] | def get(self):
"""API endpoint to retrieve a list of links to transaction
outputs.
Returns:
A :obj:`list` of :cls:`str` of links to outputs.
"""
parser = reqparse.RequestParser()
parser.add_argument('public_key', type=parameters.valid_ed25519, required=True)
parser.add_argument('spent', type=parameters.valid_bool)
args = parser.parse_args(strict=True)
pool = current_app.config['bigchain_pool']
with pool() as bigchain:
outputs = bigchain.get_outputs_filtered(args['public_key'], args['spent'])
return [{'transaction_id': output.txid, 'output_index': output.output} for output in outputs] # depends on [control=['with'], data=['bigchain']] |
def from_path(list_of_dir):
"""Create a new FileCollection and add all files from ``dir_path``.
:param list_of_dir: absolute dir path, WinDir instance, list of
absolute dir path or list of WinDir instance.
**中文文档**
添加dir_path目录下的所有文件到一个新的FileCollection中.
"""
if isinstance(list_of_dir, str):
list_of_dir = [list_of_dir, ]
elif isinstance(list_of_dir, WinDir):
list_of_dir = [list_of_dir.abspath, ]
elif isinstance(list_of_dir, list):
list_of_dir = [str(i) for i in list_of_dir]
fc = FileCollection()
for dir_path in list_of_dir:
for winfile in FileCollection.yield_all_winfile(dir_path):
fc.files.setdefault(winfile.abspath, winfile)
return fc | def function[from_path, parameter[list_of_dir]]:
constant[Create a new FileCollection and add all files from ``dir_path``.
:param list_of_dir: absolute dir path, WinDir instance, list of
absolute dir path or list of WinDir instance.
**中文文档**
添加dir_path目录下的所有文件到一个新的FileCollection中.
]
if call[name[isinstance], parameter[name[list_of_dir], name[str]]] begin[:]
variable[list_of_dir] assign[=] list[[<ast.Name object at 0x7da2044c3850>]]
variable[fc] assign[=] call[name[FileCollection], parameter[]]
for taget[name[dir_path]] in starred[name[list_of_dir]] begin[:]
for taget[name[winfile]] in starred[call[name[FileCollection].yield_all_winfile, parameter[name[dir_path]]]] begin[:]
call[name[fc].files.setdefault, parameter[name[winfile].abspath, name[winfile]]]
return[name[fc]] | keyword[def] identifier[from_path] ( identifier[list_of_dir] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[list_of_dir] , identifier[str] ):
identifier[list_of_dir] =[ identifier[list_of_dir] ,]
keyword[elif] identifier[isinstance] ( identifier[list_of_dir] , identifier[WinDir] ):
identifier[list_of_dir] =[ identifier[list_of_dir] . identifier[abspath] ,]
keyword[elif] identifier[isinstance] ( identifier[list_of_dir] , identifier[list] ):
identifier[list_of_dir] =[ identifier[str] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[list_of_dir] ]
identifier[fc] = identifier[FileCollection] ()
keyword[for] identifier[dir_path] keyword[in] identifier[list_of_dir] :
keyword[for] identifier[winfile] keyword[in] identifier[FileCollection] . identifier[yield_all_winfile] ( identifier[dir_path] ):
identifier[fc] . identifier[files] . identifier[setdefault] ( identifier[winfile] . identifier[abspath] , identifier[winfile] )
keyword[return] identifier[fc] | def from_path(list_of_dir):
"""Create a new FileCollection and add all files from ``dir_path``.
:param list_of_dir: absolute dir path, WinDir instance, list of
absolute dir path or list of WinDir instance.
**中文文档**
添加dir_path目录下的所有文件到一个新的FileCollection中.
"""
if isinstance(list_of_dir, str):
list_of_dir = [list_of_dir] # depends on [control=['if'], data=[]]
elif isinstance(list_of_dir, WinDir):
list_of_dir = [list_of_dir.abspath] # depends on [control=['if'], data=[]]
elif isinstance(list_of_dir, list):
list_of_dir = [str(i) for i in list_of_dir] # depends on [control=['if'], data=[]]
fc = FileCollection()
for dir_path in list_of_dir:
for winfile in FileCollection.yield_all_winfile(dir_path):
fc.files.setdefault(winfile.abspath, winfile) # depends on [control=['for'], data=['winfile']] # depends on [control=['for'], data=['dir_path']]
return fc |
def init(self, advertise_addr=None, listen_addr='0.0.0.0:2377',
force_new_cluster=False, default_addr_pool=None,
subnet_size=None, **kwargs):
"""
Initialize a new swarm on this Engine.
Args:
advertise_addr (str): Externally reachable address advertised to
other nodes. This can either be an address/port combination in
the form ``192.168.1.1:4567``, or an interface followed by a
port number, like ``eth0:4567``. If the port number is omitted,
the port number from the listen address is used.
If not specified, it will be automatically detected when
possible.
listen_addr (str): Listen address used for inter-manager
communication, as well as determining the networking interface
used for the VXLAN Tunnel Endpoint (VTEP). This can either be
an address/port combination in the form ``192.168.1.1:4567``,
or an interface followed by a port number, like ``eth0:4567``.
If the port number is omitted, the default swarm listening port
is used. Default: ``0.0.0.0:2377``
force_new_cluster (bool): Force creating a new Swarm, even if
already part of one. Default: False
default_addr_pool (list of str): Default Address Pool specifies
default subnet pools for global scope networks. Each pool
should be specified as a CIDR block, like '10.0.0.0/8'.
Default: None
subnet_size (int): SubnetSize specifies the subnet size of the
networks created from the default subnet pool. Default: None
task_history_retention_limit (int): Maximum number of tasks
history stored.
snapshot_interval (int): Number of logs entries between snapshot.
keep_old_snapshots (int): Number of snapshots to keep beyond the
current snapshot.
log_entries_for_slow_followers (int): Number of log entries to
keep around to sync up slow followers after a snapshot is
created.
heartbeat_tick (int): Amount of ticks (in seconds) between each
heartbeat.
election_tick (int): Amount of ticks (in seconds) needed without a
leader to trigger a new election.
dispatcher_heartbeat_period (int): The delay for an agent to send
a heartbeat to the dispatcher.
node_cert_expiry (int): Automatic expiry for nodes certificates.
external_ca (dict): Configuration for forwarding signing requests
to an external certificate authority. Use
``docker.types.SwarmExternalCA``.
name (string): Swarm's name
labels (dict): User-defined key/value metadata.
signing_ca_cert (str): The desired signing CA certificate for all
swarm node TLS leaf certificates, in PEM format.
signing_ca_key (str): The desired signing CA key for all swarm
node TLS leaf certificates, in PEM format.
ca_force_rotate (int): An integer whose purpose is to force swarm
to generate a new signing CA certificate and key, if none have
been specified.
autolock_managers (boolean): If set, generate a key and use it to
lock data stored on the managers.
log_driver (DriverConfig): The default log driver to use for tasks
created in the orchestrator.
Returns:
``True`` if the request went through.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> client.swarm.init(
advertise_addr='eth0', listen_addr='0.0.0.0:5000',
force_new_cluster=False, default_addr_pool=['10.20.0.0/16],
subnet_size=24, snapshot_interval=5000,
log_entries_for_slow_followers=1200
)
"""
init_kwargs = {
'advertise_addr': advertise_addr,
'listen_addr': listen_addr,
'force_new_cluster': force_new_cluster,
'default_addr_pool': default_addr_pool,
'subnet_size': subnet_size
}
init_kwargs['swarm_spec'] = self.client.api.create_swarm_spec(**kwargs)
self.client.api.init_swarm(**init_kwargs)
self.reload()
return True | def function[init, parameter[self, advertise_addr, listen_addr, force_new_cluster, default_addr_pool, subnet_size]]:
constant[
Initialize a new swarm on this Engine.
Args:
advertise_addr (str): Externally reachable address advertised to
other nodes. This can either be an address/port combination in
the form ``192.168.1.1:4567``, or an interface followed by a
port number, like ``eth0:4567``. If the port number is omitted,
the port number from the listen address is used.
If not specified, it will be automatically detected when
possible.
listen_addr (str): Listen address used for inter-manager
communication, as well as determining the networking interface
used for the VXLAN Tunnel Endpoint (VTEP). This can either be
an address/port combination in the form ``192.168.1.1:4567``,
or an interface followed by a port number, like ``eth0:4567``.
If the port number is omitted, the default swarm listening port
is used. Default: ``0.0.0.0:2377``
force_new_cluster (bool): Force creating a new Swarm, even if
already part of one. Default: False
default_addr_pool (list of str): Default Address Pool specifies
default subnet pools for global scope networks. Each pool
should be specified as a CIDR block, like '10.0.0.0/8'.
Default: None
subnet_size (int): SubnetSize specifies the subnet size of the
networks created from the default subnet pool. Default: None
task_history_retention_limit (int): Maximum number of tasks
history stored.
snapshot_interval (int): Number of logs entries between snapshot.
keep_old_snapshots (int): Number of snapshots to keep beyond the
current snapshot.
log_entries_for_slow_followers (int): Number of log entries to
keep around to sync up slow followers after a snapshot is
created.
heartbeat_tick (int): Amount of ticks (in seconds) between each
heartbeat.
election_tick (int): Amount of ticks (in seconds) needed without a
leader to trigger a new election.
dispatcher_heartbeat_period (int): The delay for an agent to send
a heartbeat to the dispatcher.
node_cert_expiry (int): Automatic expiry for nodes certificates.
external_ca (dict): Configuration for forwarding signing requests
to an external certificate authority. Use
``docker.types.SwarmExternalCA``.
name (string): Swarm's name
labels (dict): User-defined key/value metadata.
signing_ca_cert (str): The desired signing CA certificate for all
swarm node TLS leaf certificates, in PEM format.
signing_ca_key (str): The desired signing CA key for all swarm
node TLS leaf certificates, in PEM format.
ca_force_rotate (int): An integer whose purpose is to force swarm
to generate a new signing CA certificate and key, if none have
been specified.
autolock_managers (boolean): If set, generate a key and use it to
lock data stored on the managers.
log_driver (DriverConfig): The default log driver to use for tasks
created in the orchestrator.
Returns:
``True`` if the request went through.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> client.swarm.init(
advertise_addr='eth0', listen_addr='0.0.0.0:5000',
force_new_cluster=False, default_addr_pool=['10.20.0.0/16],
subnet_size=24, snapshot_interval=5000,
log_entries_for_slow_followers=1200
)
]
variable[init_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da18dc9b9d0>, <ast.Constant object at 0x7da18dc99a50>, <ast.Constant object at 0x7da18dc99090>, <ast.Constant object at 0x7da18dc9a380>, <ast.Constant object at 0x7da18dc99360>], [<ast.Name object at 0x7da18dc98ee0>, <ast.Name object at 0x7da18dc984c0>, <ast.Name object at 0x7da18dc995a0>, <ast.Name object at 0x7da18dc9ae30>, <ast.Name object at 0x7da18dc9b370>]]
call[name[init_kwargs]][constant[swarm_spec]] assign[=] call[name[self].client.api.create_swarm_spec, parameter[]]
call[name[self].client.api.init_swarm, parameter[]]
call[name[self].reload, parameter[]]
return[constant[True]] | keyword[def] identifier[init] ( identifier[self] , identifier[advertise_addr] = keyword[None] , identifier[listen_addr] = literal[string] ,
identifier[force_new_cluster] = keyword[False] , identifier[default_addr_pool] = keyword[None] ,
identifier[subnet_size] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[init_kwargs] ={
literal[string] : identifier[advertise_addr] ,
literal[string] : identifier[listen_addr] ,
literal[string] : identifier[force_new_cluster] ,
literal[string] : identifier[default_addr_pool] ,
literal[string] : identifier[subnet_size]
}
identifier[init_kwargs] [ literal[string] ]= identifier[self] . identifier[client] . identifier[api] . identifier[create_swarm_spec] (** identifier[kwargs] )
identifier[self] . identifier[client] . identifier[api] . identifier[init_swarm] (** identifier[init_kwargs] )
identifier[self] . identifier[reload] ()
keyword[return] keyword[True] | def init(self, advertise_addr=None, listen_addr='0.0.0.0:2377', force_new_cluster=False, default_addr_pool=None, subnet_size=None, **kwargs):
"""
Initialize a new swarm on this Engine.
Args:
advertise_addr (str): Externally reachable address advertised to
other nodes. This can either be an address/port combination in
the form ``192.168.1.1:4567``, or an interface followed by a
port number, like ``eth0:4567``. If the port number is omitted,
the port number from the listen address is used.
If not specified, it will be automatically detected when
possible.
listen_addr (str): Listen address used for inter-manager
communication, as well as determining the networking interface
used for the VXLAN Tunnel Endpoint (VTEP). This can either be
an address/port combination in the form ``192.168.1.1:4567``,
or an interface followed by a port number, like ``eth0:4567``.
If the port number is omitted, the default swarm listening port
is used. Default: ``0.0.0.0:2377``
force_new_cluster (bool): Force creating a new Swarm, even if
already part of one. Default: False
default_addr_pool (list of str): Default Address Pool specifies
default subnet pools for global scope networks. Each pool
should be specified as a CIDR block, like '10.0.0.0/8'.
Default: None
subnet_size (int): SubnetSize specifies the subnet size of the
networks created from the default subnet pool. Default: None
task_history_retention_limit (int): Maximum number of tasks
history stored.
snapshot_interval (int): Number of logs entries between snapshot.
keep_old_snapshots (int): Number of snapshots to keep beyond the
current snapshot.
log_entries_for_slow_followers (int): Number of log entries to
keep around to sync up slow followers after a snapshot is
created.
heartbeat_tick (int): Amount of ticks (in seconds) between each
heartbeat.
election_tick (int): Amount of ticks (in seconds) needed without a
leader to trigger a new election.
dispatcher_heartbeat_period (int): The delay for an agent to send
a heartbeat to the dispatcher.
node_cert_expiry (int): Automatic expiry for nodes certificates.
external_ca (dict): Configuration for forwarding signing requests
to an external certificate authority. Use
``docker.types.SwarmExternalCA``.
name (string): Swarm's name
labels (dict): User-defined key/value metadata.
signing_ca_cert (str): The desired signing CA certificate for all
swarm node TLS leaf certificates, in PEM format.
signing_ca_key (str): The desired signing CA key for all swarm
node TLS leaf certificates, in PEM format.
ca_force_rotate (int): An integer whose purpose is to force swarm
to generate a new signing CA certificate and key, if none have
been specified.
autolock_managers (boolean): If set, generate a key and use it to
lock data stored on the managers.
log_driver (DriverConfig): The default log driver to use for tasks
created in the orchestrator.
Returns:
``True`` if the request went through.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
Example:
>>> client.swarm.init(
advertise_addr='eth0', listen_addr='0.0.0.0:5000',
force_new_cluster=False, default_addr_pool=['10.20.0.0/16],
subnet_size=24, snapshot_interval=5000,
log_entries_for_slow_followers=1200
)
"""
init_kwargs = {'advertise_addr': advertise_addr, 'listen_addr': listen_addr, 'force_new_cluster': force_new_cluster, 'default_addr_pool': default_addr_pool, 'subnet_size': subnet_size}
init_kwargs['swarm_spec'] = self.client.api.create_swarm_spec(**kwargs)
self.client.api.init_swarm(**init_kwargs)
self.reload()
return True |
def ip_hide_ext_community_list_holder_extcommunity_list_extcommunity_list_num(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip = ET.SubElement(config, "ip", xmlns="urn:brocade.com:mgmt:brocade-common-def")
hide_ext_community_list_holder = ET.SubElement(ip, "hide-ext-community-list-holder", xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
extcommunity_list = ET.SubElement(hide_ext_community_list_holder, "extcommunity-list")
extcommunity_list_num = ET.SubElement(extcommunity_list, "extcommunity-list-num")
extcommunity_list_num.text = kwargs.pop('extcommunity_list_num')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[ip_hide_ext_community_list_holder_extcommunity_list_extcommunity_list_num, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[ip] assign[=] call[name[ET].SubElement, parameter[name[config], constant[ip]]]
variable[hide_ext_community_list_holder] assign[=] call[name[ET].SubElement, parameter[name[ip], constant[hide-ext-community-list-holder]]]
variable[extcommunity_list] assign[=] call[name[ET].SubElement, parameter[name[hide_ext_community_list_holder], constant[extcommunity-list]]]
variable[extcommunity_list_num] assign[=] call[name[ET].SubElement, parameter[name[extcommunity_list], constant[extcommunity-list-num]]]
name[extcommunity_list_num].text assign[=] call[name[kwargs].pop, parameter[constant[extcommunity_list_num]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[ip_hide_ext_community_list_holder_extcommunity_list_extcommunity_list_num] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[ip] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[hide_ext_community_list_holder] = identifier[ET] . identifier[SubElement] ( identifier[ip] , literal[string] , identifier[xmlns] = literal[string] )
identifier[extcommunity_list] = identifier[ET] . identifier[SubElement] ( identifier[hide_ext_community_list_holder] , literal[string] )
identifier[extcommunity_list_num] = identifier[ET] . identifier[SubElement] ( identifier[extcommunity_list] , literal[string] )
identifier[extcommunity_list_num] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def ip_hide_ext_community_list_holder_extcommunity_list_extcommunity_list_num(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
ip = ET.SubElement(config, 'ip', xmlns='urn:brocade.com:mgmt:brocade-common-def')
hide_ext_community_list_holder = ET.SubElement(ip, 'hide-ext-community-list-holder', xmlns='urn:brocade.com:mgmt:brocade-ip-policy')
extcommunity_list = ET.SubElement(hide_ext_community_list_holder, 'extcommunity-list')
extcommunity_list_num = ET.SubElement(extcommunity_list, 'extcommunity-list-num')
extcommunity_list_num.text = kwargs.pop('extcommunity_list_num')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def main():
"""
Entry point for Ellis.
"""
# Monkey patch warnings.showwarning:
warnings.showwarning = customized_warning
# Read command line args, if any:
args = read_cmdline()
# Configuration file, if given on the command line:
config_file = args['config_file']
try:
ellis = Ellis(config_file)
except NoRuleError:
msg = ("There are no valid rules in the config file. "
"Ellis can not run without rules.")
print_err(msg)
else:
ellis.start() | def function[main, parameter[]]:
constant[
Entry point for Ellis.
]
name[warnings].showwarning assign[=] name[customized_warning]
variable[args] assign[=] call[name[read_cmdline], parameter[]]
variable[config_file] assign[=] call[name[args]][constant[config_file]]
<ast.Try object at 0x7da1b1d37070> | keyword[def] identifier[main] ():
literal[string]
identifier[warnings] . identifier[showwarning] = identifier[customized_warning]
identifier[args] = identifier[read_cmdline] ()
identifier[config_file] = identifier[args] [ literal[string] ]
keyword[try] :
identifier[ellis] = identifier[Ellis] ( identifier[config_file] )
keyword[except] identifier[NoRuleError] :
identifier[msg] =( literal[string]
literal[string] )
identifier[print_err] ( identifier[msg] )
keyword[else] :
identifier[ellis] . identifier[start] () | def main():
"""
Entry point for Ellis.
"""
# Monkey patch warnings.showwarning:
warnings.showwarning = customized_warning
# Read command line args, if any:
args = read_cmdline()
# Configuration file, if given on the command line:
config_file = args['config_file']
try:
ellis = Ellis(config_file) # depends on [control=['try'], data=[]]
except NoRuleError:
msg = 'There are no valid rules in the config file. Ellis can not run without rules.'
print_err(msg) # depends on [control=['except'], data=[]]
else:
ellis.start() |
def file(cls, path, encoding=None, parser=None):
"""Set a file as a source.
File are parsed as literal python dicts by default, this behaviour
can be configured.
Args:
path: The path to the file to be parsed
encoding: The encoding of the file.
Defaults to 'raw'. Available built-in values: 'ini', 'json', 'yaml'.
Custom value can be used in conjunction with parser.
parser: A parser function for a custom encoder.
It is expected to return a dict containing the parsed values
when called with the contents of the file as an argument.
"""
cls.__hierarchy.append(file.File(path, encoding, parser)) | def function[file, parameter[cls, path, encoding, parser]]:
constant[Set a file as a source.
File are parsed as literal python dicts by default, this behaviour
can be configured.
Args:
path: The path to the file to be parsed
encoding: The encoding of the file.
Defaults to 'raw'. Available built-in values: 'ini', 'json', 'yaml'.
Custom value can be used in conjunction with parser.
parser: A parser function for a custom encoder.
It is expected to return a dict containing the parsed values
when called with the contents of the file as an argument.
]
call[name[cls].__hierarchy.append, parameter[call[name[file].File, parameter[name[path], name[encoding], name[parser]]]]] | keyword[def] identifier[file] ( identifier[cls] , identifier[path] , identifier[encoding] = keyword[None] , identifier[parser] = keyword[None] ):
literal[string]
identifier[cls] . identifier[__hierarchy] . identifier[append] ( identifier[file] . identifier[File] ( identifier[path] , identifier[encoding] , identifier[parser] )) | def file(cls, path, encoding=None, parser=None):
"""Set a file as a source.
File are parsed as literal python dicts by default, this behaviour
can be configured.
Args:
path: The path to the file to be parsed
encoding: The encoding of the file.
Defaults to 'raw'. Available built-in values: 'ini', 'json', 'yaml'.
Custom value can be used in conjunction with parser.
parser: A parser function for a custom encoder.
It is expected to return a dict containing the parsed values
when called with the contents of the file as an argument.
"""
cls.__hierarchy.append(file.File(path, encoding, parser)) |
def trace_scan(loop_fn,
initial_state,
elems,
trace_fn,
parallel_iterations=10,
name=None):
"""A simplified version of `tf.scan` that has configurable tracing.
This function repeatedly calls `loop_fn(state, elem)`, where `state` is the
`initial_state` during the first iteration, and the return value of `loop_fn`
for every iteration thereafter. `elem` is a slice of `elements` along the
first dimension, accessed in order. Additionally, it calls `trace_fn` on the
return value of `loop_fn`. The `Tensor`s in return values of `trace_fn` are
stacked and returned from this function, such that the first dimension of
those `Tensor`s matches the size of `elems`.
Args:
loop_fn: A callable that takes in a `Tensor` or a nested collection of
`Tensor`s with the same structure as `initial_state`, a slice of `elems`
and returns the same structure as `initial_state`.
initial_state: A `Tensor` or a nested collection of `Tensor`s passed to
`loop_fn` in the first iteration.
elems: A `Tensor` that is split along the first dimension and each element
of which is passed to `loop_fn`.
trace_fn: A callable that takes in the return value of `loop_fn` and returns
a `Tensor` or a nested collection of `Tensor`s.
parallel_iterations: Passed to the internal `tf.while_loop`.
name: Name scope used in this function. Default: 'trace_scan'.
Returns:
final_state: The final return value of `loop_fn`.
trace: The same structure as the return value of `trace_fn`, but with each
`Tensor` being a stack of the corresponding `Tensors` in the return value
of `trace_fn` for each slice of `elems`.
"""
with tf.compat.v1.name_scope(
name, 'trace_scan', [initial_state, elems]), tf.compat.v1.variable_scope(
tf.compat.v1.get_variable_scope()) as vs:
if vs.caching_device is None and not tf.executing_eagerly():
vs.set_caching_device(lambda op: op.device)
initial_state = tf.nest.map_structure(
lambda x: tf.convert_to_tensor(value=x, name='initial_state'),
initial_state)
elems = tf.convert_to_tensor(value=elems, name='elems')
static_length = elems.shape[0]
if tf.compat.dimension_value(static_length) is None:
length = tf.shape(input=elems)[0]
else:
length = tf.convert_to_tensor(
value=static_length, dtype=tf.int32, name='length')
# This is an TensorArray in part because of XLA, which had trouble with
# non-statically known indices. I.e. elems[i] errored, but
# elems_array.read(i) worked.
elems_array = tf.TensorArray(
elems.dtype, size=length, element_shape=elems.shape[1:])
elems_array = elems_array.unstack(elems)
trace_arrays = tf.nest.map_structure(
lambda x: tf.TensorArray(x.dtype, size=length, element_shape=x.shape),
trace_fn(initial_state))
def _body(i, state, trace_arrays):
state = loop_fn(state, elems_array.read(i))
trace_arrays = tf.nest.pack_sequence_as(trace_arrays, [
a.write(i, v) for a, v in zip(
tf.nest.flatten(trace_arrays), tf.nest.flatten(trace_fn(state)))
])
return i + 1, state, trace_arrays
_, final_state, trace_arrays = tf.while_loop(
cond=lambda i, *args: i < length,
body=_body,
loop_vars=(0, initial_state, trace_arrays),
parallel_iterations=parallel_iterations)
stacked_trace = tf.nest.map_structure(lambda x: x.stack(), trace_arrays)
# Restore the static length if we know it.
def _merge_static_length(x):
x.set_shape(tf.TensorShape(static_length).concatenate(x.shape[1:]))
return x
stacked_trace = tf.nest.map_structure(_merge_static_length, stacked_trace)
return final_state, stacked_trace | def function[trace_scan, parameter[loop_fn, initial_state, elems, trace_fn, parallel_iterations, name]]:
constant[A simplified version of `tf.scan` that has configurable tracing.
This function repeatedly calls `loop_fn(state, elem)`, where `state` is the
`initial_state` during the first iteration, and the return value of `loop_fn`
for every iteration thereafter. `elem` is a slice of `elements` along the
first dimension, accessed in order. Additionally, it calls `trace_fn` on the
return value of `loop_fn`. The `Tensor`s in return values of `trace_fn` are
stacked and returned from this function, such that the first dimension of
those `Tensor`s matches the size of `elems`.
Args:
loop_fn: A callable that takes in a `Tensor` or a nested collection of
`Tensor`s with the same structure as `initial_state`, a slice of `elems`
and returns the same structure as `initial_state`.
initial_state: A `Tensor` or a nested collection of `Tensor`s passed to
`loop_fn` in the first iteration.
elems: A `Tensor` that is split along the first dimension and each element
of which is passed to `loop_fn`.
trace_fn: A callable that takes in the return value of `loop_fn` and returns
a `Tensor` or a nested collection of `Tensor`s.
parallel_iterations: Passed to the internal `tf.while_loop`.
name: Name scope used in this function. Default: 'trace_scan'.
Returns:
final_state: The final return value of `loop_fn`.
trace: The same structure as the return value of `trace_fn`, but with each
`Tensor` being a stack of the corresponding `Tensors` in the return value
of `trace_fn` for each slice of `elems`.
]
with call[name[tf].compat.v1.name_scope, parameter[name[name], constant[trace_scan], list[[<ast.Name object at 0x7da1b02c7b20>, <ast.Name object at 0x7da1b02c7af0>]]]] begin[:]
if <ast.BoolOp object at 0x7da1b02c77f0> begin[:]
call[name[vs].set_caching_device, parameter[<ast.Lambda object at 0x7da1b02c7580>]]
variable[initial_state] assign[=] call[name[tf].nest.map_structure, parameter[<ast.Lambda object at 0x7da1b02c7340>, name[initial_state]]]
variable[elems] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[static_length] assign[=] call[name[elems].shape][constant[0]]
if compare[call[name[tf].compat.dimension_value, parameter[name[static_length]]] is constant[None]] begin[:]
variable[length] assign[=] call[call[name[tf].shape, parameter[]]][constant[0]]
variable[elems_array] assign[=] call[name[tf].TensorArray, parameter[name[elems].dtype]]
variable[elems_array] assign[=] call[name[elems_array].unstack, parameter[name[elems]]]
variable[trace_arrays] assign[=] call[name[tf].nest.map_structure, parameter[<ast.Lambda object at 0x7da1b02c63e0>, call[name[trace_fn], parameter[name[initial_state]]]]]
def function[_body, parameter[i, state, trace_arrays]]:
variable[state] assign[=] call[name[loop_fn], parameter[name[state], call[name[elems_array].read, parameter[name[i]]]]]
variable[trace_arrays] assign[=] call[name[tf].nest.pack_sequence_as, parameter[name[trace_arrays], <ast.ListComp object at 0x7da1b02c5cf0>]]
return[tuple[[<ast.BinOp object at 0x7da1b02c57e0>, <ast.Name object at 0x7da1b02c5750>, <ast.Name object at 0x7da1b02c5720>]]]
<ast.Tuple object at 0x7da1b02c5690> assign[=] call[name[tf].while_loop, parameter[]]
variable[stacked_trace] assign[=] call[name[tf].nest.map_structure, parameter[<ast.Lambda object at 0x7da1b02c4190>, name[trace_arrays]]]
def function[_merge_static_length, parameter[x]]:
call[name[x].set_shape, parameter[call[call[name[tf].TensorShape, parameter[name[static_length]]].concatenate, parameter[call[name[x].shape][<ast.Slice object at 0x7da1b02c4760>]]]]]
return[name[x]]
variable[stacked_trace] assign[=] call[name[tf].nest.map_structure, parameter[name[_merge_static_length], name[stacked_trace]]]
return[tuple[[<ast.Name object at 0x7da1b03e2e90>, <ast.Name object at 0x7da1b03e2f80>]]] | keyword[def] identifier[trace_scan] ( identifier[loop_fn] ,
identifier[initial_state] ,
identifier[elems] ,
identifier[trace_fn] ,
identifier[parallel_iterations] = literal[int] ,
identifier[name] = keyword[None] ):
literal[string]
keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] (
identifier[name] , literal[string] ,[ identifier[initial_state] , identifier[elems] ]), identifier[tf] . identifier[compat] . identifier[v1] . identifier[variable_scope] (
identifier[tf] . identifier[compat] . identifier[v1] . identifier[get_variable_scope] ()) keyword[as] identifier[vs] :
keyword[if] identifier[vs] . identifier[caching_device] keyword[is] keyword[None] keyword[and] keyword[not] identifier[tf] . identifier[executing_eagerly] ():
identifier[vs] . identifier[set_caching_device] ( keyword[lambda] identifier[op] : identifier[op] . identifier[device] )
identifier[initial_state] = identifier[tf] . identifier[nest] . identifier[map_structure] (
keyword[lambda] identifier[x] : identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[x] , identifier[name] = literal[string] ),
identifier[initial_state] )
identifier[elems] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[elems] , identifier[name] = literal[string] )
identifier[static_length] = identifier[elems] . identifier[shape] [ literal[int] ]
keyword[if] identifier[tf] . identifier[compat] . identifier[dimension_value] ( identifier[static_length] ) keyword[is] keyword[None] :
identifier[length] = identifier[tf] . identifier[shape] ( identifier[input] = identifier[elems] )[ literal[int] ]
keyword[else] :
identifier[length] = identifier[tf] . identifier[convert_to_tensor] (
identifier[value] = identifier[static_length] , identifier[dtype] = identifier[tf] . identifier[int32] , identifier[name] = literal[string] )
identifier[elems_array] = identifier[tf] . identifier[TensorArray] (
identifier[elems] . identifier[dtype] , identifier[size] = identifier[length] , identifier[element_shape] = identifier[elems] . identifier[shape] [ literal[int] :])
identifier[elems_array] = identifier[elems_array] . identifier[unstack] ( identifier[elems] )
identifier[trace_arrays] = identifier[tf] . identifier[nest] . identifier[map_structure] (
keyword[lambda] identifier[x] : identifier[tf] . identifier[TensorArray] ( identifier[x] . identifier[dtype] , identifier[size] = identifier[length] , identifier[element_shape] = identifier[x] . identifier[shape] ),
identifier[trace_fn] ( identifier[initial_state] ))
keyword[def] identifier[_body] ( identifier[i] , identifier[state] , identifier[trace_arrays] ):
identifier[state] = identifier[loop_fn] ( identifier[state] , identifier[elems_array] . identifier[read] ( identifier[i] ))
identifier[trace_arrays] = identifier[tf] . identifier[nest] . identifier[pack_sequence_as] ( identifier[trace_arrays] ,[
identifier[a] . identifier[write] ( identifier[i] , identifier[v] ) keyword[for] identifier[a] , identifier[v] keyword[in] identifier[zip] (
identifier[tf] . identifier[nest] . identifier[flatten] ( identifier[trace_arrays] ), identifier[tf] . identifier[nest] . identifier[flatten] ( identifier[trace_fn] ( identifier[state] )))
])
keyword[return] identifier[i] + literal[int] , identifier[state] , identifier[trace_arrays]
identifier[_] , identifier[final_state] , identifier[trace_arrays] = identifier[tf] . identifier[while_loop] (
identifier[cond] = keyword[lambda] identifier[i] ,* identifier[args] : identifier[i] < identifier[length] ,
identifier[body] = identifier[_body] ,
identifier[loop_vars] =( literal[int] , identifier[initial_state] , identifier[trace_arrays] ),
identifier[parallel_iterations] = identifier[parallel_iterations] )
identifier[stacked_trace] = identifier[tf] . identifier[nest] . identifier[map_structure] ( keyword[lambda] identifier[x] : identifier[x] . identifier[stack] (), identifier[trace_arrays] )
keyword[def] identifier[_merge_static_length] ( identifier[x] ):
identifier[x] . identifier[set_shape] ( identifier[tf] . identifier[TensorShape] ( identifier[static_length] ). identifier[concatenate] ( identifier[x] . identifier[shape] [ literal[int] :]))
keyword[return] identifier[x]
identifier[stacked_trace] = identifier[tf] . identifier[nest] . identifier[map_structure] ( identifier[_merge_static_length] , identifier[stacked_trace] )
keyword[return] identifier[final_state] , identifier[stacked_trace] | def trace_scan(loop_fn, initial_state, elems, trace_fn, parallel_iterations=10, name=None):
"""A simplified version of `tf.scan` that has configurable tracing.
This function repeatedly calls `loop_fn(state, elem)`, where `state` is the
`initial_state` during the first iteration, and the return value of `loop_fn`
for every iteration thereafter. `elem` is a slice of `elements` along the
first dimension, accessed in order. Additionally, it calls `trace_fn` on the
return value of `loop_fn`. The `Tensor`s in return values of `trace_fn` are
stacked and returned from this function, such that the first dimension of
those `Tensor`s matches the size of `elems`.
Args:
loop_fn: A callable that takes in a `Tensor` or a nested collection of
`Tensor`s with the same structure as `initial_state`, a slice of `elems`
and returns the same structure as `initial_state`.
initial_state: A `Tensor` or a nested collection of `Tensor`s passed to
`loop_fn` in the first iteration.
elems: A `Tensor` that is split along the first dimension and each element
of which is passed to `loop_fn`.
trace_fn: A callable that takes in the return value of `loop_fn` and returns
a `Tensor` or a nested collection of `Tensor`s.
parallel_iterations: Passed to the internal `tf.while_loop`.
name: Name scope used in this function. Default: 'trace_scan'.
Returns:
final_state: The final return value of `loop_fn`.
trace: The same structure as the return value of `trace_fn`, but with each
`Tensor` being a stack of the corresponding `Tensors` in the return value
of `trace_fn` for each slice of `elems`.
"""
with tf.compat.v1.name_scope(name, 'trace_scan', [initial_state, elems]), tf.compat.v1.variable_scope(tf.compat.v1.get_variable_scope()) as vs:
if vs.caching_device is None and (not tf.executing_eagerly()):
vs.set_caching_device(lambda op: op.device) # depends on [control=['if'], data=[]]
initial_state = tf.nest.map_structure(lambda x: tf.convert_to_tensor(value=x, name='initial_state'), initial_state)
elems = tf.convert_to_tensor(value=elems, name='elems')
static_length = elems.shape[0]
if tf.compat.dimension_value(static_length) is None:
length = tf.shape(input=elems)[0] # depends on [control=['if'], data=[]]
else:
length = tf.convert_to_tensor(value=static_length, dtype=tf.int32, name='length')
# This is an TensorArray in part because of XLA, which had trouble with
# non-statically known indices. I.e. elems[i] errored, but
# elems_array.read(i) worked.
elems_array = tf.TensorArray(elems.dtype, size=length, element_shape=elems.shape[1:])
elems_array = elems_array.unstack(elems)
trace_arrays = tf.nest.map_structure(lambda x: tf.TensorArray(x.dtype, size=length, element_shape=x.shape), trace_fn(initial_state))
def _body(i, state, trace_arrays):
state = loop_fn(state, elems_array.read(i))
trace_arrays = tf.nest.pack_sequence_as(trace_arrays, [a.write(i, v) for (a, v) in zip(tf.nest.flatten(trace_arrays), tf.nest.flatten(trace_fn(state)))])
return (i + 1, state, trace_arrays)
(_, final_state, trace_arrays) = tf.while_loop(cond=lambda i, *args: i < length, body=_body, loop_vars=(0, initial_state, trace_arrays), parallel_iterations=parallel_iterations)
stacked_trace = tf.nest.map_structure(lambda x: x.stack(), trace_arrays)
# Restore the static length if we know it.
def _merge_static_length(x):
x.set_shape(tf.TensorShape(static_length).concatenate(x.shape[1:]))
return x
stacked_trace = tf.nest.map_structure(_merge_static_length, stacked_trace)
return (final_state, stacked_trace) # depends on [control=['with'], data=[]] |
def insert(table, datas, avoid_conflict=False):
""" Insert row from datas
:param table: Safe table name
:param datas: List of dicts.
:param avoid_conflict: Allows ignoring error if already exists (do nothing then)
:return:
"""
if avoid_conflict:
debut = """INSERT INTO {table} {ENTETE_INSERT} VALUES {BIND_INSERT} ON CONFLICT DO NOTHING"""
else:
debut = """INSERT INTO {table} {ENTETE_INSERT} VALUES {BIND_INSERT} RETURNING *"""
l = [abstractRequetesSQL.formate(debut, table=table, INSERT=d, args=d) for d in datas if d]
return Executant(l) | def function[insert, parameter[table, datas, avoid_conflict]]:
constant[ Insert row from datas
:param table: Safe table name
:param datas: List of dicts.
:param avoid_conflict: Allows ignoring error if already exists (do nothing then)
:return:
]
if name[avoid_conflict] begin[:]
variable[debut] assign[=] constant[INSERT INTO {table} {ENTETE_INSERT} VALUES {BIND_INSERT} ON CONFLICT DO NOTHING]
variable[l] assign[=] <ast.ListComp object at 0x7da1b11a33d0>
return[call[name[Executant], parameter[name[l]]]] | keyword[def] identifier[insert] ( identifier[table] , identifier[datas] , identifier[avoid_conflict] = keyword[False] ):
literal[string]
keyword[if] identifier[avoid_conflict] :
identifier[debut] = literal[string]
keyword[else] :
identifier[debut] = literal[string]
identifier[l] =[ identifier[abstractRequetesSQL] . identifier[formate] ( identifier[debut] , identifier[table] = identifier[table] , identifier[INSERT] = identifier[d] , identifier[args] = identifier[d] ) keyword[for] identifier[d] keyword[in] identifier[datas] keyword[if] identifier[d] ]
keyword[return] identifier[Executant] ( identifier[l] ) | def insert(table, datas, avoid_conflict=False):
""" Insert row from datas
:param table: Safe table name
:param datas: List of dicts.
:param avoid_conflict: Allows ignoring error if already exists (do nothing then)
:return:
"""
if avoid_conflict:
debut = 'INSERT INTO {table} {ENTETE_INSERT} VALUES {BIND_INSERT} ON CONFLICT DO NOTHING' # depends on [control=['if'], data=[]]
else:
debut = 'INSERT INTO {table} {ENTETE_INSERT} VALUES {BIND_INSERT} RETURNING *'
l = [abstractRequetesSQL.formate(debut, table=table, INSERT=d, args=d) for d in datas if d]
return Executant(l) |
def as_a_dict(self):
"""
Displays the index as a dictionary. This includes the design document
id, index name, index type, and index definition.
:returns: Dictionary representation of the index as a dictionary
"""
index_dict = {
'ddoc': self._ddoc_id,
'name': self._name,
'type': self._type,
'def': self._def
}
if self._partitioned:
index_dict['partitioned'] = True
return index_dict | def function[as_a_dict, parameter[self]]:
constant[
Displays the index as a dictionary. This includes the design document
id, index name, index type, and index definition.
:returns: Dictionary representation of the index as a dictionary
]
variable[index_dict] assign[=] dictionary[[<ast.Constant object at 0x7da20c76cd90>, <ast.Constant object at 0x7da20c76f550>, <ast.Constant object at 0x7da20c76c190>, <ast.Constant object at 0x7da20c76c310>], [<ast.Attribute object at 0x7da20c76e8c0>, <ast.Attribute object at 0x7da20c76d750>, <ast.Attribute object at 0x7da20c76cdc0>, <ast.Attribute object at 0x7da20c76cd60>]]
if name[self]._partitioned begin[:]
call[name[index_dict]][constant[partitioned]] assign[=] constant[True]
return[name[index_dict]] | keyword[def] identifier[as_a_dict] ( identifier[self] ):
literal[string]
identifier[index_dict] ={
literal[string] : identifier[self] . identifier[_ddoc_id] ,
literal[string] : identifier[self] . identifier[_name] ,
literal[string] : identifier[self] . identifier[_type] ,
literal[string] : identifier[self] . identifier[_def]
}
keyword[if] identifier[self] . identifier[_partitioned] :
identifier[index_dict] [ literal[string] ]= keyword[True]
keyword[return] identifier[index_dict] | def as_a_dict(self):
"""
Displays the index as a dictionary. This includes the design document
id, index name, index type, and index definition.
:returns: Dictionary representation of the index as a dictionary
"""
index_dict = {'ddoc': self._ddoc_id, 'name': self._name, 'type': self._type, 'def': self._def}
if self._partitioned:
index_dict['partitioned'] = True # depends on [control=['if'], data=[]]
return index_dict |
def has_departure_delay(self, subpartcheck=True):
"""
Deprecated
"""
if self.status != 'VOLGENS-PLAN':
return True
if subpartcheck and self.trip_parts[0].has_delay:
return True
if self.requested_time != self.departure_time_actual:
return True
return False | def function[has_departure_delay, parameter[self, subpartcheck]]:
constant[
Deprecated
]
if compare[name[self].status not_equal[!=] constant[VOLGENS-PLAN]] begin[:]
return[constant[True]]
if <ast.BoolOp object at 0x7da1b1d210f0> begin[:]
return[constant[True]]
if compare[name[self].requested_time not_equal[!=] name[self].departure_time_actual] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[has_departure_delay] ( identifier[self] , identifier[subpartcheck] = keyword[True] ):
literal[string]
keyword[if] identifier[self] . identifier[status] != literal[string] :
keyword[return] keyword[True]
keyword[if] identifier[subpartcheck] keyword[and] identifier[self] . identifier[trip_parts] [ literal[int] ]. identifier[has_delay] :
keyword[return] keyword[True]
keyword[if] identifier[self] . identifier[requested_time] != identifier[self] . identifier[departure_time_actual] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def has_departure_delay(self, subpartcheck=True):
"""
Deprecated
"""
if self.status != 'VOLGENS-PLAN':
return True # depends on [control=['if'], data=[]]
if subpartcheck and self.trip_parts[0].has_delay:
return True # depends on [control=['if'], data=[]]
if self.requested_time != self.departure_time_actual:
return True # depends on [control=['if'], data=[]]
return False |
def qsize(self, qname):
"""Return the approximate size of the queue."""
if qname in self._queues:
return self._queues[qname].qsize()
else:
raise ValueError(_("queue %s is not defined"), qname) | def function[qsize, parameter[self, qname]]:
constant[Return the approximate size of the queue.]
if compare[name[qname] in name[self]._queues] begin[:]
return[call[call[name[self]._queues][name[qname]].qsize, parameter[]]] | keyword[def] identifier[qsize] ( identifier[self] , identifier[qname] ):
literal[string]
keyword[if] identifier[qname] keyword[in] identifier[self] . identifier[_queues] :
keyword[return] identifier[self] . identifier[_queues] [ identifier[qname] ]. identifier[qsize] ()
keyword[else] :
keyword[raise] identifier[ValueError] ( identifier[_] ( literal[string] ), identifier[qname] ) | def qsize(self, qname):
"""Return the approximate size of the queue."""
if qname in self._queues:
return self._queues[qname].qsize() # depends on [control=['if'], data=['qname']]
else:
raise ValueError(_('queue %s is not defined'), qname) |
def _ensure_values(data: Mapping[str, Any]) -> Tuple[Dict[str, Any], bool]:
""" Make sure we have appropriate keys and say if we should write """
to_return = {}
should_write = False
for keyname, typekind, default in REQUIRED_DATA:
if keyname not in data:
LOG.debug(f"Defaulted config value {keyname} to {default}")
to_return[keyname] = default
should_write = True
elif not isinstance(data[keyname], typekind):
LOG.warning(
f"Config value {keyname} was {type(data[keyname])} not"
f" {typekind}, defaulted to {default}")
to_return[keyname] = default
should_write = True
else:
to_return[keyname] = data[keyname]
return to_return, should_write | def function[_ensure_values, parameter[data]]:
constant[ Make sure we have appropriate keys and say if we should write ]
variable[to_return] assign[=] dictionary[[], []]
variable[should_write] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da1b0927280>, <ast.Name object at 0x7da1b0927250>, <ast.Name object at 0x7da1b0926dd0>]]] in starred[name[REQUIRED_DATA]] begin[:]
if compare[name[keyname] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:]
call[name[LOG].debug, parameter[<ast.JoinedStr object at 0x7da1b0927a30>]]
call[name[to_return]][name[keyname]] assign[=] name[default]
variable[should_write] assign[=] constant[True]
return[tuple[[<ast.Name object at 0x7da1b086d7b0>, <ast.Name object at 0x7da1b086efb0>]]] | keyword[def] identifier[_ensure_values] ( identifier[data] : identifier[Mapping] [ identifier[str] , identifier[Any] ])-> identifier[Tuple] [ identifier[Dict] [ identifier[str] , identifier[Any] ], identifier[bool] ]:
literal[string]
identifier[to_return] ={}
identifier[should_write] = keyword[False]
keyword[for] identifier[keyname] , identifier[typekind] , identifier[default] keyword[in] identifier[REQUIRED_DATA] :
keyword[if] identifier[keyname] keyword[not] keyword[in] identifier[data] :
identifier[LOG] . identifier[debug] ( literal[string] )
identifier[to_return] [ identifier[keyname] ]= identifier[default]
identifier[should_write] = keyword[True]
keyword[elif] keyword[not] identifier[isinstance] ( identifier[data] [ identifier[keyname] ], identifier[typekind] ):
identifier[LOG] . identifier[warning] (
literal[string]
literal[string] )
identifier[to_return] [ identifier[keyname] ]= identifier[default]
identifier[should_write] = keyword[True]
keyword[else] :
identifier[to_return] [ identifier[keyname] ]= identifier[data] [ identifier[keyname] ]
keyword[return] identifier[to_return] , identifier[should_write] | def _ensure_values(data: Mapping[str, Any]) -> Tuple[Dict[str, Any], bool]:
""" Make sure we have appropriate keys and say if we should write """
to_return = {}
should_write = False
for (keyname, typekind, default) in REQUIRED_DATA:
if keyname not in data:
LOG.debug(f'Defaulted config value {keyname} to {default}')
to_return[keyname] = default
should_write = True # depends on [control=['if'], data=['keyname']]
elif not isinstance(data[keyname], typekind):
LOG.warning(f'Config value {keyname} was {type(data[keyname])} not {typekind}, defaulted to {default}')
to_return[keyname] = default
should_write = True # depends on [control=['if'], data=[]]
else:
to_return[keyname] = data[keyname] # depends on [control=['for'], data=[]]
return (to_return, should_write) |
def valid_arxiv_categories():
"""List of all arXiv categories that ever existed.
Example:
>>> from inspire_schemas.utils import valid_arxiv_categories
>>> 'funct-an' in valid_arxiv_categories()
True
"""
schema = load_schema('elements/arxiv_categories')
categories = schema['enum']
categories.extend(_NEW_CATEGORIES.keys())
return categories | def function[valid_arxiv_categories, parameter[]]:
constant[List of all arXiv categories that ever existed.
Example:
>>> from inspire_schemas.utils import valid_arxiv_categories
>>> 'funct-an' in valid_arxiv_categories()
True
]
variable[schema] assign[=] call[name[load_schema], parameter[constant[elements/arxiv_categories]]]
variable[categories] assign[=] call[name[schema]][constant[enum]]
call[name[categories].extend, parameter[call[name[_NEW_CATEGORIES].keys, parameter[]]]]
return[name[categories]] | keyword[def] identifier[valid_arxiv_categories] ():
literal[string]
identifier[schema] = identifier[load_schema] ( literal[string] )
identifier[categories] = identifier[schema] [ literal[string] ]
identifier[categories] . identifier[extend] ( identifier[_NEW_CATEGORIES] . identifier[keys] ())
keyword[return] identifier[categories] | def valid_arxiv_categories():
"""List of all arXiv categories that ever existed.
Example:
>>> from inspire_schemas.utils import valid_arxiv_categories
>>> 'funct-an' in valid_arxiv_categories()
True
"""
schema = load_schema('elements/arxiv_categories')
categories = schema['enum']
categories.extend(_NEW_CATEGORIES.keys())
return categories |
def select_many(self, projector, selector=identity):
'''Projects each element of a sequence to an intermediate new sequence,
flattens the resulting sequence into one sequence and optionally
transforms the flattened sequence using a selector function.
Args:
projector: A unary function mapping each element of the source
sequence into an intermediate sequence. If no projection
function is provided, the intermediate sequence will consist of
the single corresponding element from the source sequence. The
projector function argument (which can have any name) and
return values are,
Args:
element: The value of the element
Returns:
An iterable derived from the element value
selector: An optional unary function mapping the elements in the
flattened intermediate sequence to corresponding elements of
the result sequence. If no selector function is provided, the
identity function is used. The selector function argument and
return values are,
Args:
element: The value of the intermediate element from the
concatenated sequences arising from the projector
function.
Returns:
The selected value derived from the element value
Returns:
A generated sequence whose elements are the result of projecting
each element of the source sequence using projector function and
then mapping each element through an optional selector function.
'''
sequences = (self._create(item).select(projector) for item in iter(self))
# TODO: [asq 2.0] Without the list() to force evaluation
# multiprocessing deadlocks...
chained_sequence = list(itertools.chain.from_iterable(sequences))
return self._create(self._pool.imap_unordered(selector,
chained_sequence, self._chunksize)) | def function[select_many, parameter[self, projector, selector]]:
constant[Projects each element of a sequence to an intermediate new sequence,
flattens the resulting sequence into one sequence and optionally
transforms the flattened sequence using a selector function.
Args:
projector: A unary function mapping each element of the source
sequence into an intermediate sequence. If no projection
function is provided, the intermediate sequence will consist of
the single corresponding element from the source sequence. The
projector function argument (which can have any name) and
return values are,
Args:
element: The value of the element
Returns:
An iterable derived from the element value
selector: An optional unary function mapping the elements in the
flattened intermediate sequence to corresponding elements of
the result sequence. If no selector function is provided, the
identity function is used. The selector function argument and
return values are,
Args:
element: The value of the intermediate element from the
concatenated sequences arising from the projector
function.
Returns:
The selected value derived from the element value
Returns:
A generated sequence whose elements are the result of projecting
each element of the source sequence using projector function and
then mapping each element through an optional selector function.
]
variable[sequences] assign[=] <ast.GeneratorExp object at 0x7da1b196d810>
variable[chained_sequence] assign[=] call[name[list], parameter[call[name[itertools].chain.from_iterable, parameter[name[sequences]]]]]
return[call[name[self]._create, parameter[call[name[self]._pool.imap_unordered, parameter[name[selector], name[chained_sequence], name[self]._chunksize]]]]] | keyword[def] identifier[select_many] ( identifier[self] , identifier[projector] , identifier[selector] = identifier[identity] ):
literal[string]
identifier[sequences] =( identifier[self] . identifier[_create] ( identifier[item] ). identifier[select] ( identifier[projector] ) keyword[for] identifier[item] keyword[in] identifier[iter] ( identifier[self] ))
identifier[chained_sequence] = identifier[list] ( identifier[itertools] . identifier[chain] . identifier[from_iterable] ( identifier[sequences] ))
keyword[return] identifier[self] . identifier[_create] ( identifier[self] . identifier[_pool] . identifier[imap_unordered] ( identifier[selector] ,
identifier[chained_sequence] , identifier[self] . identifier[_chunksize] )) | def select_many(self, projector, selector=identity):
"""Projects each element of a sequence to an intermediate new sequence,
flattens the resulting sequence into one sequence and optionally
transforms the flattened sequence using a selector function.
Args:
projector: A unary function mapping each element of the source
sequence into an intermediate sequence. If no projection
function is provided, the intermediate sequence will consist of
the single corresponding element from the source sequence. The
projector function argument (which can have any name) and
return values are,
Args:
element: The value of the element
Returns:
An iterable derived from the element value
selector: An optional unary function mapping the elements in the
flattened intermediate sequence to corresponding elements of
the result sequence. If no selector function is provided, the
identity function is used. The selector function argument and
return values are,
Args:
element: The value of the intermediate element from the
concatenated sequences arising from the projector
function.
Returns:
The selected value derived from the element value
Returns:
A generated sequence whose elements are the result of projecting
each element of the source sequence using projector function and
then mapping each element through an optional selector function.
"""
sequences = (self._create(item).select(projector) for item in iter(self)) # TODO: [asq 2.0] Without the list() to force evaluation
# multiprocessing deadlocks...
chained_sequence = list(itertools.chain.from_iterable(sequences))
return self._create(self._pool.imap_unordered(selector, chained_sequence, self._chunksize)) |
def fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_peer_fcf_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe_get_interface = ET.Element("fcoe_get_interface")
config = fcoe_get_interface
output = ET.SubElement(fcoe_get_interface, "output")
fcoe_intf_list = ET.SubElement(output, "fcoe-intf-list")
fcoe_intf_fcoe_port_id_key = ET.SubElement(fcoe_intf_list, "fcoe-intf-fcoe-port-id")
fcoe_intf_fcoe_port_id_key.text = kwargs.pop('fcoe_intf_fcoe_port_id')
fcoe_intf_peer_fcf_mac = ET.SubElement(fcoe_intf_list, "fcoe-intf-peer-fcf-mac")
fcoe_intf_peer_fcf_mac.text = kwargs.pop('fcoe_intf_peer_fcf_mac')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_peer_fcf_mac, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[fcoe_get_interface] assign[=] call[name[ET].Element, parameter[constant[fcoe_get_interface]]]
variable[config] assign[=] name[fcoe_get_interface]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[fcoe_get_interface], constant[output]]]
variable[fcoe_intf_list] assign[=] call[name[ET].SubElement, parameter[name[output], constant[fcoe-intf-list]]]
variable[fcoe_intf_fcoe_port_id_key] assign[=] call[name[ET].SubElement, parameter[name[fcoe_intf_list], constant[fcoe-intf-fcoe-port-id]]]
name[fcoe_intf_fcoe_port_id_key].text assign[=] call[name[kwargs].pop, parameter[constant[fcoe_intf_fcoe_port_id]]]
variable[fcoe_intf_peer_fcf_mac] assign[=] call[name[ET].SubElement, parameter[name[fcoe_intf_list], constant[fcoe-intf-peer-fcf-mac]]]
name[fcoe_intf_peer_fcf_mac].text assign[=] call[name[kwargs].pop, parameter[constant[fcoe_intf_peer_fcf_mac]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_peer_fcf_mac] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[fcoe_get_interface] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[fcoe_get_interface]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[fcoe_get_interface] , literal[string] )
identifier[fcoe_intf_list] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[fcoe_intf_fcoe_port_id_key] = identifier[ET] . identifier[SubElement] ( identifier[fcoe_intf_list] , literal[string] )
identifier[fcoe_intf_fcoe_port_id_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[fcoe_intf_peer_fcf_mac] = identifier[ET] . identifier[SubElement] ( identifier[fcoe_intf_list] , literal[string] )
identifier[fcoe_intf_peer_fcf_mac] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_peer_fcf_mac(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
fcoe_get_interface = ET.Element('fcoe_get_interface')
config = fcoe_get_interface
output = ET.SubElement(fcoe_get_interface, 'output')
fcoe_intf_list = ET.SubElement(output, 'fcoe-intf-list')
fcoe_intf_fcoe_port_id_key = ET.SubElement(fcoe_intf_list, 'fcoe-intf-fcoe-port-id')
fcoe_intf_fcoe_port_id_key.text = kwargs.pop('fcoe_intf_fcoe_port_id')
fcoe_intf_peer_fcf_mac = ET.SubElement(fcoe_intf_list, 'fcoe-intf-peer-fcf-mac')
fcoe_intf_peer_fcf_mac.text = kwargs.pop('fcoe_intf_peer_fcf_mac')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def limit(self, limit):
""" Limit the number of rows returned from the database.
:param limit: The number of rows to return in the recipe. 0 will
return all rows.
:type limit: int
"""
if self._limit != limit:
self.dirty = True
self._limit = limit
return self | def function[limit, parameter[self, limit]]:
constant[ Limit the number of rows returned from the database.
:param limit: The number of rows to return in the recipe. 0 will
return all rows.
:type limit: int
]
if compare[name[self]._limit not_equal[!=] name[limit]] begin[:]
name[self].dirty assign[=] constant[True]
name[self]._limit assign[=] name[limit]
return[name[self]] | keyword[def] identifier[limit] ( identifier[self] , identifier[limit] ):
literal[string]
keyword[if] identifier[self] . identifier[_limit] != identifier[limit] :
identifier[self] . identifier[dirty] = keyword[True]
identifier[self] . identifier[_limit] = identifier[limit]
keyword[return] identifier[self] | def limit(self, limit):
""" Limit the number of rows returned from the database.
:param limit: The number of rows to return in the recipe. 0 will
return all rows.
:type limit: int
"""
if self._limit != limit:
self.dirty = True
self._limit = limit # depends on [control=['if'], data=['limit']]
return self |
def get(self,
resource_id=None,
resource_action=None,
resource_cls=None,
single_resource=False):
""" Gets the details for one or more resources by ID
Args:
cls - gophish.models.Model - The resource class
resource_id - str - The endpoint (URL path) for the resource
resource_action - str - An action to perform on the resource
resource_cls - cls - A class to use for parsing, if different than
the base resource
single_resource - bool - An override to tell Gophish that even
though we aren't requesting a single resource, we expect a
single response object
Returns:
One or more instances of cls parsed from the returned JSON
"""
endpoint = self.endpoint
if not resource_cls:
resource_cls = self._cls
if resource_id:
endpoint = self._build_url(endpoint, resource_id)
if resource_action:
endpoint = self._build_url(endpoint, resource_action)
response = self.api.execute("GET", endpoint)
if not response.ok:
raise Error.parse(response.json())
if resource_id or single_resource:
return resource_cls.parse(response.json())
return [resource_cls.parse(resource) for resource in response.json()] | def function[get, parameter[self, resource_id, resource_action, resource_cls, single_resource]]:
constant[ Gets the details for one or more resources by ID
Args:
cls - gophish.models.Model - The resource class
resource_id - str - The endpoint (URL path) for the resource
resource_action - str - An action to perform on the resource
resource_cls - cls - A class to use for parsing, if different than
the base resource
single_resource - bool - An override to tell Gophish that even
though we aren't requesting a single resource, we expect a
single response object
Returns:
One or more instances of cls parsed from the returned JSON
]
variable[endpoint] assign[=] name[self].endpoint
if <ast.UnaryOp object at 0x7da1b2344a60> begin[:]
variable[resource_cls] assign[=] name[self]._cls
if name[resource_id] begin[:]
variable[endpoint] assign[=] call[name[self]._build_url, parameter[name[endpoint], name[resource_id]]]
if name[resource_action] begin[:]
variable[endpoint] assign[=] call[name[self]._build_url, parameter[name[endpoint], name[resource_action]]]
variable[response] assign[=] call[name[self].api.execute, parameter[constant[GET], name[endpoint]]]
if <ast.UnaryOp object at 0x7da1b23450c0> begin[:]
<ast.Raise object at 0x7da1b11a6e90>
if <ast.BoolOp object at 0x7da1b11a4ca0> begin[:]
return[call[name[resource_cls].parse, parameter[call[name[response].json, parameter[]]]]]
return[<ast.ListComp object at 0x7da1b11a4e20>] | keyword[def] identifier[get] ( identifier[self] ,
identifier[resource_id] = keyword[None] ,
identifier[resource_action] = keyword[None] ,
identifier[resource_cls] = keyword[None] ,
identifier[single_resource] = keyword[False] ):
literal[string]
identifier[endpoint] = identifier[self] . identifier[endpoint]
keyword[if] keyword[not] identifier[resource_cls] :
identifier[resource_cls] = identifier[self] . identifier[_cls]
keyword[if] identifier[resource_id] :
identifier[endpoint] = identifier[self] . identifier[_build_url] ( identifier[endpoint] , identifier[resource_id] )
keyword[if] identifier[resource_action] :
identifier[endpoint] = identifier[self] . identifier[_build_url] ( identifier[endpoint] , identifier[resource_action] )
identifier[response] = identifier[self] . identifier[api] . identifier[execute] ( literal[string] , identifier[endpoint] )
keyword[if] keyword[not] identifier[response] . identifier[ok] :
keyword[raise] identifier[Error] . identifier[parse] ( identifier[response] . identifier[json] ())
keyword[if] identifier[resource_id] keyword[or] identifier[single_resource] :
keyword[return] identifier[resource_cls] . identifier[parse] ( identifier[response] . identifier[json] ())
keyword[return] [ identifier[resource_cls] . identifier[parse] ( identifier[resource] ) keyword[for] identifier[resource] keyword[in] identifier[response] . identifier[json] ()] | def get(self, resource_id=None, resource_action=None, resource_cls=None, single_resource=False):
""" Gets the details for one or more resources by ID
Args:
cls - gophish.models.Model - The resource class
resource_id - str - The endpoint (URL path) for the resource
resource_action - str - An action to perform on the resource
resource_cls - cls - A class to use for parsing, if different than
the base resource
single_resource - bool - An override to tell Gophish that even
though we aren't requesting a single resource, we expect a
single response object
Returns:
One or more instances of cls parsed from the returned JSON
"""
endpoint = self.endpoint
if not resource_cls:
resource_cls = self._cls # depends on [control=['if'], data=[]]
if resource_id:
endpoint = self._build_url(endpoint, resource_id) # depends on [control=['if'], data=[]]
if resource_action:
endpoint = self._build_url(endpoint, resource_action) # depends on [control=['if'], data=[]]
response = self.api.execute('GET', endpoint)
if not response.ok:
raise Error.parse(response.json()) # depends on [control=['if'], data=[]]
if resource_id or single_resource:
return resource_cls.parse(response.json()) # depends on [control=['if'], data=[]]
return [resource_cls.parse(resource) for resource in response.json()] |
def __construct_byset(self, start, byxxx, base):
"""
If a `BYXXX` sequence is passed to the constructor at the same level as
`FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some
specifications which cannot be reached given some starting conditions.
This occurs whenever the interval is not coprime with the base of a
given unit and the difference between the starting position and the
ending position is not coprime with the greatest common denominator
between the interval and the base. For example, with a FREQ of hourly
starting at 17:00 and an interval of 4, the only valid values for
BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not
coprime.
:param start:
Specifies the starting position.
:param byxxx:
An iterable containing the list of allowed values.
:param base:
The largest allowable value for the specified frequency (e.g.
24 hours, 60 minutes).
This does not preserve the type of the iterable, returning a set, since
the values should be unique and the order is irrelevant, this will
speed up later lookups.
In the event of an empty set, raises a :exception:`ValueError`, as this
results in an empty rrule.
"""
cset = set()
# Support a single byxxx value.
if isinstance(byxxx, integer_types):
byxxx = (byxxx, )
for num in byxxx:
i_gcd = gcd(self._interval, base)
# Use divmod rather than % because we need to wrap negative nums.
if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0:
cset.add(num)
if len(cset) == 0:
raise ValueError("Invalid rrule byxxx generates an empty set.")
return cset | def function[__construct_byset, parameter[self, start, byxxx, base]]:
constant[
If a `BYXXX` sequence is passed to the constructor at the same level as
`FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some
specifications which cannot be reached given some starting conditions.
This occurs whenever the interval is not coprime with the base of a
given unit and the difference between the starting position and the
ending position is not coprime with the greatest common denominator
between the interval and the base. For example, with a FREQ of hourly
starting at 17:00 and an interval of 4, the only valid values for
BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not
coprime.
:param start:
Specifies the starting position.
:param byxxx:
An iterable containing the list of allowed values.
:param base:
The largest allowable value for the specified frequency (e.g.
24 hours, 60 minutes).
This does not preserve the type of the iterable, returning a set, since
the values should be unique and the order is irrelevant, this will
speed up later lookups.
In the event of an empty set, raises a :exception:`ValueError`, as this
results in an empty rrule.
]
variable[cset] assign[=] call[name[set], parameter[]]
if call[name[isinstance], parameter[name[byxxx], name[integer_types]]] begin[:]
variable[byxxx] assign[=] tuple[[<ast.Name object at 0x7da20c6a8bb0>]]
for taget[name[num]] in starred[name[byxxx]] begin[:]
variable[i_gcd] assign[=] call[name[gcd], parameter[name[self]._interval, name[base]]]
if <ast.BoolOp object at 0x7da20c6a8d30> begin[:]
call[name[cset].add, parameter[name[num]]]
if compare[call[name[len], parameter[name[cset]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da20c6a9510>
return[name[cset]] | keyword[def] identifier[__construct_byset] ( identifier[self] , identifier[start] , identifier[byxxx] , identifier[base] ):
literal[string]
identifier[cset] = identifier[set] ()
keyword[if] identifier[isinstance] ( identifier[byxxx] , identifier[integer_types] ):
identifier[byxxx] =( identifier[byxxx] ,)
keyword[for] identifier[num] keyword[in] identifier[byxxx] :
identifier[i_gcd] = identifier[gcd] ( identifier[self] . identifier[_interval] , identifier[base] )
keyword[if] identifier[i_gcd] == literal[int] keyword[or] identifier[divmod] ( identifier[num] - identifier[start] , identifier[i_gcd] )[ literal[int] ]== literal[int] :
identifier[cset] . identifier[add] ( identifier[num] )
keyword[if] identifier[len] ( identifier[cset] )== literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[cset] | def __construct_byset(self, start, byxxx, base):
"""
If a `BYXXX` sequence is passed to the constructor at the same level as
`FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some
specifications which cannot be reached given some starting conditions.
This occurs whenever the interval is not coprime with the base of a
given unit and the difference between the starting position and the
ending position is not coprime with the greatest common denominator
between the interval and the base. For example, with a FREQ of hourly
starting at 17:00 and an interval of 4, the only valid values for
BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not
coprime.
:param start:
Specifies the starting position.
:param byxxx:
An iterable containing the list of allowed values.
:param base:
The largest allowable value for the specified frequency (e.g.
24 hours, 60 minutes).
This does not preserve the type of the iterable, returning a set, since
the values should be unique and the order is irrelevant, this will
speed up later lookups.
In the event of an empty set, raises a :exception:`ValueError`, as this
results in an empty rrule.
"""
cset = set()
# Support a single byxxx value.
if isinstance(byxxx, integer_types):
byxxx = (byxxx,) # depends on [control=['if'], data=[]]
for num in byxxx:
i_gcd = gcd(self._interval, base)
# Use divmod rather than % because we need to wrap negative nums.
if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0:
cset.add(num) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['num']]
if len(cset) == 0:
raise ValueError('Invalid rrule byxxx generates an empty set.') # depends on [control=['if'], data=[]]
return cset |
def calculate_tensor_to_probability_map_output_shapes(operator):
'''
Allowed input/output patterns are
ONNX < 1.2
1. [1, C] ---> ---> A map
2. [1, C_1, ..., C_n] ---> A map
ONNX >= 1.2
1. [N, C] ---> ---> A sequence of maps
2. [N, C_1, ..., C_n] ---> A sequence of maps
Note that N must be 1 currently if you're using ONNX<1.2 because old ZipMap doesn't produce a seqneuce of map If the
input is not [N, C], it will be reshaped into [N, C_1 x C_2, x ... x C_n] before being fed into ONNX ZipMap.
'''
check_input_and_output_numbers(operator, input_count_range=1, output_count_range=1)
check_input_and_output_types(operator, good_input_types=[FloatTensorType])
model_type = operator.raw_operator.WhichOneof('Type')
if model_type == 'neuralNetworkClassifier':
class_label_type = operator.raw_operator.neuralNetworkClassifier.WhichOneof('ClassLabels')
else:
raise TypeError('%s has no class label' % model_type)
N = operator.inputs[0].type.shape[0]
doc_string = operator.outputs[0].type.doc_string
if class_label_type == 'stringClassLabels':
if operator.target_opset < 7:
operator.outputs[0].type = DictionaryType(StringTensorType([1]), FloatTensorType([1]), doc_string)
else:
operator.outputs[0].type = \
SequenceType(DictionaryType(StringTensorType([]), FloatTensorType([])), N, doc_string)
elif class_label_type == 'int64ClassLabels':
if operator.target_opset < 7:
operator.outputs[0].type = DictionaryType(Int64TensorType([1]), FloatTensorType([1]), doc_string)
else:
operator.outputs[0].type = \
SequenceType(DictionaryType(Int64TensorType([]), FloatTensorType([])), N, doc_string)
else:
raise ValueError('Unsupported label type') | def function[calculate_tensor_to_probability_map_output_shapes, parameter[operator]]:
constant[
Allowed input/output patterns are
ONNX < 1.2
1. [1, C] ---> ---> A map
2. [1, C_1, ..., C_n] ---> A map
ONNX >= 1.2
1. [N, C] ---> ---> A sequence of maps
2. [N, C_1, ..., C_n] ---> A sequence of maps
Note that N must be 1 currently if you're using ONNX<1.2 because old ZipMap doesn't produce a seqneuce of map If the
input is not [N, C], it will be reshaped into [N, C_1 x C_2, x ... x C_n] before being fed into ONNX ZipMap.
]
call[name[check_input_and_output_numbers], parameter[name[operator]]]
call[name[check_input_and_output_types], parameter[name[operator]]]
variable[model_type] assign[=] call[name[operator].raw_operator.WhichOneof, parameter[constant[Type]]]
if compare[name[model_type] equal[==] constant[neuralNetworkClassifier]] begin[:]
variable[class_label_type] assign[=] call[name[operator].raw_operator.neuralNetworkClassifier.WhichOneof, parameter[constant[ClassLabels]]]
variable[N] assign[=] call[call[name[operator].inputs][constant[0]].type.shape][constant[0]]
variable[doc_string] assign[=] call[name[operator].outputs][constant[0]].type.doc_string
if compare[name[class_label_type] equal[==] constant[stringClassLabels]] begin[:]
if compare[name[operator].target_opset less[<] constant[7]] begin[:]
call[name[operator].outputs][constant[0]].type assign[=] call[name[DictionaryType], parameter[call[name[StringTensorType], parameter[list[[<ast.Constant object at 0x7da20c6ab940>]]]], call[name[FloatTensorType], parameter[list[[<ast.Constant object at 0x7da20c6a8520>]]]], name[doc_string]]] | keyword[def] identifier[calculate_tensor_to_probability_map_output_shapes] ( identifier[operator] ):
literal[string]
identifier[check_input_and_output_numbers] ( identifier[operator] , identifier[input_count_range] = literal[int] , identifier[output_count_range] = literal[int] )
identifier[check_input_and_output_types] ( identifier[operator] , identifier[good_input_types] =[ identifier[FloatTensorType] ])
identifier[model_type] = identifier[operator] . identifier[raw_operator] . identifier[WhichOneof] ( literal[string] )
keyword[if] identifier[model_type] == literal[string] :
identifier[class_label_type] = identifier[operator] . identifier[raw_operator] . identifier[neuralNetworkClassifier] . identifier[WhichOneof] ( literal[string] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] % identifier[model_type] )
identifier[N] = identifier[operator] . identifier[inputs] [ literal[int] ]. identifier[type] . identifier[shape] [ literal[int] ]
identifier[doc_string] = identifier[operator] . identifier[outputs] [ literal[int] ]. identifier[type] . identifier[doc_string]
keyword[if] identifier[class_label_type] == literal[string] :
keyword[if] identifier[operator] . identifier[target_opset] < literal[int] :
identifier[operator] . identifier[outputs] [ literal[int] ]. identifier[type] = identifier[DictionaryType] ( identifier[StringTensorType] ([ literal[int] ]), identifier[FloatTensorType] ([ literal[int] ]), identifier[doc_string] )
keyword[else] :
identifier[operator] . identifier[outputs] [ literal[int] ]. identifier[type] = identifier[SequenceType] ( identifier[DictionaryType] ( identifier[StringTensorType] ([]), identifier[FloatTensorType] ([])), identifier[N] , identifier[doc_string] )
keyword[elif] identifier[class_label_type] == literal[string] :
keyword[if] identifier[operator] . identifier[target_opset] < literal[int] :
identifier[operator] . identifier[outputs] [ literal[int] ]. identifier[type] = identifier[DictionaryType] ( identifier[Int64TensorType] ([ literal[int] ]), identifier[FloatTensorType] ([ literal[int] ]), identifier[doc_string] )
keyword[else] :
identifier[operator] . identifier[outputs] [ literal[int] ]. identifier[type] = identifier[SequenceType] ( identifier[DictionaryType] ( identifier[Int64TensorType] ([]), identifier[FloatTensorType] ([])), identifier[N] , identifier[doc_string] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def calculate_tensor_to_probability_map_output_shapes(operator):
"""
Allowed input/output patterns are
ONNX < 1.2
1. [1, C] ---> ---> A map
2. [1, C_1, ..., C_n] ---> A map
ONNX >= 1.2
1. [N, C] ---> ---> A sequence of maps
2. [N, C_1, ..., C_n] ---> A sequence of maps
Note that N must be 1 currently if you're using ONNX<1.2 because old ZipMap doesn't produce a seqneuce of map If the
input is not [N, C], it will be reshaped into [N, C_1 x C_2, x ... x C_n] before being fed into ONNX ZipMap.
"""
check_input_and_output_numbers(operator, input_count_range=1, output_count_range=1)
check_input_and_output_types(operator, good_input_types=[FloatTensorType])
model_type = operator.raw_operator.WhichOneof('Type')
if model_type == 'neuralNetworkClassifier':
class_label_type = operator.raw_operator.neuralNetworkClassifier.WhichOneof('ClassLabels') # depends on [control=['if'], data=[]]
else:
raise TypeError('%s has no class label' % model_type)
N = operator.inputs[0].type.shape[0]
doc_string = operator.outputs[0].type.doc_string
if class_label_type == 'stringClassLabels':
if operator.target_opset < 7:
operator.outputs[0].type = DictionaryType(StringTensorType([1]), FloatTensorType([1]), doc_string) # depends on [control=['if'], data=[]]
else:
operator.outputs[0].type = SequenceType(DictionaryType(StringTensorType([]), FloatTensorType([])), N, doc_string) # depends on [control=['if'], data=[]]
elif class_label_type == 'int64ClassLabels':
if operator.target_opset < 7:
operator.outputs[0].type = DictionaryType(Int64TensorType([1]), FloatTensorType([1]), doc_string) # depends on [control=['if'], data=[]]
else:
operator.outputs[0].type = SequenceType(DictionaryType(Int64TensorType([]), FloatTensorType([])), N, doc_string) # depends on [control=['if'], data=[]]
else:
raise ValueError('Unsupported label type') |
def sync(self):
""" Upload the changed registers to the chip
This will check which register have been changed since the last sync and send them to the chip.
You need to call this method if you modify one of the register attributes (mcp23017.IODIRA for example) or
if you use one of the helper attributes (mcp23017.direction_A0 for example)
"""
registers = {
0x00: 'IODIRA',
0x01: 'IODIRB',
0x02: 'IPOLA',
0x03: 'IPOLB',
0x04: 'GPINTENA',
0x05: 'GPINTENB',
0x0C: 'GPPUA',
0x0D: 'GPPUB',
0x12: 'GPIOA',
0x13: 'GPIOB'
}
for reg in registers:
name = registers[reg]
if getattr(self, name) != getattr(self, '_' + name):
self.i2c_write_register(reg, [getattr(self, name)])
setattr(self, '_' + name, getattr(self, name)) | def function[sync, parameter[self]]:
constant[ Upload the changed registers to the chip
This will check which register have been changed since the last sync and send them to the chip.
You need to call this method if you modify one of the register attributes (mcp23017.IODIRA for example) or
if you use one of the helper attributes (mcp23017.direction_A0 for example)
]
variable[registers] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b6af50>, <ast.Constant object at 0x7da1b1b6b010>, <ast.Constant object at 0x7da1b1b6b7f0>, <ast.Constant object at 0x7da1b1b6a110>, <ast.Constant object at 0x7da1b1b69bd0>, <ast.Constant object at 0x7da1b1b6beb0>, <ast.Constant object at 0x7da1b1b68c70>, <ast.Constant object at 0x7da1b1b69f00>, <ast.Constant object at 0x7da1b1b69750>, <ast.Constant object at 0x7da1b1b6a4d0>], [<ast.Constant object at 0x7da1b1b697b0>, <ast.Constant object at 0x7da1b1b69fc0>, <ast.Constant object at 0x7da1b1b6b700>, <ast.Constant object at 0x7da1b1b69180>, <ast.Constant object at 0x7da1b1b691b0>, <ast.Constant object at 0x7da1b1b6aa40>, <ast.Constant object at 0x7da1b1b68520>, <ast.Constant object at 0x7da1b1b6bf10>, <ast.Constant object at 0x7da1b1b6a020>, <ast.Constant object at 0x7da1b1b68dc0>]]
for taget[name[reg]] in starred[name[registers]] begin[:]
variable[name] assign[=] call[name[registers]][name[reg]]
if compare[call[name[getattr], parameter[name[self], name[name]]] not_equal[!=] call[name[getattr], parameter[name[self], binary_operation[constant[_] + name[name]]]]] begin[:]
call[name[self].i2c_write_register, parameter[name[reg], list[[<ast.Call object at 0x7da1b1b68f10>]]]]
call[name[setattr], parameter[name[self], binary_operation[constant[_] + name[name]], call[name[getattr], parameter[name[self], name[name]]]]] | keyword[def] identifier[sync] ( identifier[self] ):
literal[string]
identifier[registers] ={
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string]
}
keyword[for] identifier[reg] keyword[in] identifier[registers] :
identifier[name] = identifier[registers] [ identifier[reg] ]
keyword[if] identifier[getattr] ( identifier[self] , identifier[name] )!= identifier[getattr] ( identifier[self] , literal[string] + identifier[name] ):
identifier[self] . identifier[i2c_write_register] ( identifier[reg] ,[ identifier[getattr] ( identifier[self] , identifier[name] )])
identifier[setattr] ( identifier[self] , literal[string] + identifier[name] , identifier[getattr] ( identifier[self] , identifier[name] )) | def sync(self):
""" Upload the changed registers to the chip
This will check which register have been changed since the last sync and send them to the chip.
You need to call this method if you modify one of the register attributes (mcp23017.IODIRA for example) or
if you use one of the helper attributes (mcp23017.direction_A0 for example)
"""
registers = {0: 'IODIRA', 1: 'IODIRB', 2: 'IPOLA', 3: 'IPOLB', 4: 'GPINTENA', 5: 'GPINTENB', 12: 'GPPUA', 13: 'GPPUB', 18: 'GPIOA', 19: 'GPIOB'}
for reg in registers:
name = registers[reg]
if getattr(self, name) != getattr(self, '_' + name):
self.i2c_write_register(reg, [getattr(self, name)])
setattr(self, '_' + name, getattr(self, name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['reg']] |
def add_subsegment(self, subsegment):
"""
Add input subsegment as a child subsegment.
"""
self._check_ended()
subsegment.parent_id = self.id
self.subsegments.append(subsegment) | def function[add_subsegment, parameter[self, subsegment]]:
constant[
Add input subsegment as a child subsegment.
]
call[name[self]._check_ended, parameter[]]
name[subsegment].parent_id assign[=] name[self].id
call[name[self].subsegments.append, parameter[name[subsegment]]] | keyword[def] identifier[add_subsegment] ( identifier[self] , identifier[subsegment] ):
literal[string]
identifier[self] . identifier[_check_ended] ()
identifier[subsegment] . identifier[parent_id] = identifier[self] . identifier[id]
identifier[self] . identifier[subsegments] . identifier[append] ( identifier[subsegment] ) | def add_subsegment(self, subsegment):
"""
Add input subsegment as a child subsegment.
"""
self._check_ended()
subsegment.parent_id = self.id
self.subsegments.append(subsegment) |
def get_conversion_factor(self, new_unit):
"""
Returns a conversion factor between this unit and a new unit.
Compound units are supported, but must have the same powers in each
unit type.
Args:
new_unit: The new unit.
"""
uo_base, ofactor = self.as_base_units
un_base, nfactor = Unit(new_unit).as_base_units
units_new = sorted(un_base.items(),
key=lambda d: _UNAME2UTYPE[d[0]])
units_old = sorted(uo_base.items(),
key=lambda d: _UNAME2UTYPE[d[0]])
factor = ofactor / nfactor
for uo, un in zip(units_old, units_new):
if uo[1] != un[1]:
raise UnitError("Units %s and %s are not compatible!" % (uo, un))
c = ALL_UNITS[_UNAME2UTYPE[uo[0]]]
factor *= (c[uo[0]] / c[un[0]]) ** uo[1]
return factor | def function[get_conversion_factor, parameter[self, new_unit]]:
constant[
Returns a conversion factor between this unit and a new unit.
Compound units are supported, but must have the same powers in each
unit type.
Args:
new_unit: The new unit.
]
<ast.Tuple object at 0x7da1b1cea560> assign[=] name[self].as_base_units
<ast.Tuple object at 0x7da1b1cea620> assign[=] call[name[Unit], parameter[name[new_unit]]].as_base_units
variable[units_new] assign[=] call[name[sorted], parameter[call[name[un_base].items, parameter[]]]]
variable[units_old] assign[=] call[name[sorted], parameter[call[name[uo_base].items, parameter[]]]]
variable[factor] assign[=] binary_operation[name[ofactor] / name[nfactor]]
for taget[tuple[[<ast.Name object at 0x7da18fe92350>, <ast.Name object at 0x7da18fe904c0>]]] in starred[call[name[zip], parameter[name[units_old], name[units_new]]]] begin[:]
if compare[call[name[uo]][constant[1]] not_equal[!=] call[name[un]][constant[1]]] begin[:]
<ast.Raise object at 0x7da18fe92980>
variable[c] assign[=] call[name[ALL_UNITS]][call[name[_UNAME2UTYPE]][call[name[uo]][constant[0]]]]
<ast.AugAssign object at 0x7da18fe91d80>
return[name[factor]] | keyword[def] identifier[get_conversion_factor] ( identifier[self] , identifier[new_unit] ):
literal[string]
identifier[uo_base] , identifier[ofactor] = identifier[self] . identifier[as_base_units]
identifier[un_base] , identifier[nfactor] = identifier[Unit] ( identifier[new_unit] ). identifier[as_base_units]
identifier[units_new] = identifier[sorted] ( identifier[un_base] . identifier[items] (),
identifier[key] = keyword[lambda] identifier[d] : identifier[_UNAME2UTYPE] [ identifier[d] [ literal[int] ]])
identifier[units_old] = identifier[sorted] ( identifier[uo_base] . identifier[items] (),
identifier[key] = keyword[lambda] identifier[d] : identifier[_UNAME2UTYPE] [ identifier[d] [ literal[int] ]])
identifier[factor] = identifier[ofactor] / identifier[nfactor]
keyword[for] identifier[uo] , identifier[un] keyword[in] identifier[zip] ( identifier[units_old] , identifier[units_new] ):
keyword[if] identifier[uo] [ literal[int] ]!= identifier[un] [ literal[int] ]:
keyword[raise] identifier[UnitError] ( literal[string] %( identifier[uo] , identifier[un] ))
identifier[c] = identifier[ALL_UNITS] [ identifier[_UNAME2UTYPE] [ identifier[uo] [ literal[int] ]]]
identifier[factor] *=( identifier[c] [ identifier[uo] [ literal[int] ]]/ identifier[c] [ identifier[un] [ literal[int] ]])** identifier[uo] [ literal[int] ]
keyword[return] identifier[factor] | def get_conversion_factor(self, new_unit):
"""
Returns a conversion factor between this unit and a new unit.
Compound units are supported, but must have the same powers in each
unit type.
Args:
new_unit: The new unit.
"""
(uo_base, ofactor) = self.as_base_units
(un_base, nfactor) = Unit(new_unit).as_base_units
units_new = sorted(un_base.items(), key=lambda d: _UNAME2UTYPE[d[0]])
units_old = sorted(uo_base.items(), key=lambda d: _UNAME2UTYPE[d[0]])
factor = ofactor / nfactor
for (uo, un) in zip(units_old, units_new):
if uo[1] != un[1]:
raise UnitError('Units %s and %s are not compatible!' % (uo, un)) # depends on [control=['if'], data=[]]
c = ALL_UNITS[_UNAME2UTYPE[uo[0]]]
factor *= (c[uo[0]] / c[un[0]]) ** uo[1] # depends on [control=['for'], data=[]]
return factor |
def update_fw(self, nids, fw_type, fw_ver, fw_path=None):
"""Update firwmare of all node_ids in nids."""
fw_bin = None
if fw_path:
fw_bin = load_fw(fw_path)
if not fw_bin:
return
self.ota.make_update(nids, fw_type, fw_ver, fw_bin) | def function[update_fw, parameter[self, nids, fw_type, fw_ver, fw_path]]:
constant[Update firwmare of all node_ids in nids.]
variable[fw_bin] assign[=] constant[None]
if name[fw_path] begin[:]
variable[fw_bin] assign[=] call[name[load_fw], parameter[name[fw_path]]]
if <ast.UnaryOp object at 0x7da1b27b8340> begin[:]
return[None]
call[name[self].ota.make_update, parameter[name[nids], name[fw_type], name[fw_ver], name[fw_bin]]] | keyword[def] identifier[update_fw] ( identifier[self] , identifier[nids] , identifier[fw_type] , identifier[fw_ver] , identifier[fw_path] = keyword[None] ):
literal[string]
identifier[fw_bin] = keyword[None]
keyword[if] identifier[fw_path] :
identifier[fw_bin] = identifier[load_fw] ( identifier[fw_path] )
keyword[if] keyword[not] identifier[fw_bin] :
keyword[return]
identifier[self] . identifier[ota] . identifier[make_update] ( identifier[nids] , identifier[fw_type] , identifier[fw_ver] , identifier[fw_bin] ) | def update_fw(self, nids, fw_type, fw_ver, fw_path=None):
"""Update firwmare of all node_ids in nids."""
fw_bin = None
if fw_path:
fw_bin = load_fw(fw_path)
if not fw_bin:
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self.ota.make_update(nids, fw_type, fw_ver, fw_bin) |
async def pin_6_pwm_128(my_board):
"""
Set digital pin 6 as a PWM output and set its output value to 128
@param my_board: A PymataCore instance
@return: No Return Value
"""
# set the pin mode
await my_board.set_pin_mode(6, Constants.PWM)
# set the pin to 128
await my_board.analog_write(6, 128)
# let the led stay lit for 3 seconds
await asyncio.sleep(3)
# shutdown
await my_board.shutdown() | <ast.AsyncFunctionDef object at 0x7da18eb54550> | keyword[async] keyword[def] identifier[pin_6_pwm_128] ( identifier[my_board] ):
literal[string]
keyword[await] identifier[my_board] . identifier[set_pin_mode] ( literal[int] , identifier[Constants] . identifier[PWM] )
keyword[await] identifier[my_board] . identifier[analog_write] ( literal[int] , literal[int] )
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] )
keyword[await] identifier[my_board] . identifier[shutdown] () | async def pin_6_pwm_128(my_board):
"""
Set digital pin 6 as a PWM output and set its output value to 128
@param my_board: A PymataCore instance
@return: No Return Value
"""
# set the pin mode
await my_board.set_pin_mode(6, Constants.PWM)
# set the pin to 128
await my_board.analog_write(6, 128)
# let the led stay lit for 3 seconds
await asyncio.sleep(3)
# shutdown
await my_board.shutdown() |
def valid_words_set(path_to_user_dictionary=None,
user_dictionary_words=None):
"""Get a set of valid words.
If :path_to_user_dictionary: is specified, then the newline-separated
words in that file will be added to the word set.
"""
def read_file(binary_file):
"""Read a binary file for its text lines."""
return binary_file.read().decode("ascii").splitlines()
try:
valid = _valid_words_cache[path_to_user_dictionary]
return valid
except KeyError:
words = set()
with resource_stream("polysquarelinter", "en_US.txt") as words_file:
words |= set(["".join(l).lower() for l in read_file(words_file)])
if path_to_user_dictionary:
# Add both case-sensitive and case-insensitive variants
# of words in user dictionary as they may be checked as
# though they are a regular word and a technical word.
words |= set([w.lower() for w in user_dictionary_words])
words |= user_dictionary_words
_valid_words_cache[path_to_user_dictionary] = words
return words | def function[valid_words_set, parameter[path_to_user_dictionary, user_dictionary_words]]:
constant[Get a set of valid words.
If :path_to_user_dictionary: is specified, then the newline-separated
words in that file will be added to the word set.
]
def function[read_file, parameter[binary_file]]:
constant[Read a binary file for its text lines.]
return[call[call[call[name[binary_file].read, parameter[]].decode, parameter[constant[ascii]]].splitlines, parameter[]]]
<ast.Try object at 0x7da204621ea0> | keyword[def] identifier[valid_words_set] ( identifier[path_to_user_dictionary] = keyword[None] ,
identifier[user_dictionary_words] = keyword[None] ):
literal[string]
keyword[def] identifier[read_file] ( identifier[binary_file] ):
literal[string]
keyword[return] identifier[binary_file] . identifier[read] (). identifier[decode] ( literal[string] ). identifier[splitlines] ()
keyword[try] :
identifier[valid] = identifier[_valid_words_cache] [ identifier[path_to_user_dictionary] ]
keyword[return] identifier[valid]
keyword[except] identifier[KeyError] :
identifier[words] = identifier[set] ()
keyword[with] identifier[resource_stream] ( literal[string] , literal[string] ) keyword[as] identifier[words_file] :
identifier[words] |= identifier[set] ([ literal[string] . identifier[join] ( identifier[l] ). identifier[lower] () keyword[for] identifier[l] keyword[in] identifier[read_file] ( identifier[words_file] )])
keyword[if] identifier[path_to_user_dictionary] :
identifier[words] |= identifier[set] ([ identifier[w] . identifier[lower] () keyword[for] identifier[w] keyword[in] identifier[user_dictionary_words] ])
identifier[words] |= identifier[user_dictionary_words]
identifier[_valid_words_cache] [ identifier[path_to_user_dictionary] ]= identifier[words]
keyword[return] identifier[words] | def valid_words_set(path_to_user_dictionary=None, user_dictionary_words=None):
"""Get a set of valid words.
If :path_to_user_dictionary: is specified, then the newline-separated
words in that file will be added to the word set.
"""
def read_file(binary_file):
"""Read a binary file for its text lines."""
return binary_file.read().decode('ascii').splitlines()
try:
valid = _valid_words_cache[path_to_user_dictionary]
return valid # depends on [control=['try'], data=[]]
except KeyError:
words = set()
with resource_stream('polysquarelinter', 'en_US.txt') as words_file:
words |= set([''.join(l).lower() for l in read_file(words_file)]) # depends on [control=['with'], data=['words_file']]
if path_to_user_dictionary:
# Add both case-sensitive and case-insensitive variants
# of words in user dictionary as they may be checked as
# though they are a regular word and a technical word.
words |= set([w.lower() for w in user_dictionary_words])
words |= user_dictionary_words # depends on [control=['if'], data=[]]
_valid_words_cache[path_to_user_dictionary] = words
return words # depends on [control=['except'], data=[]] |
def set_time(self, time):
'''
Converts time data into a pandas date object.
Parameters
----------
time: netcdf
Contains time information.
Returns
-------
pandas.DatetimeIndex
'''
times = num2date(time[:].squeeze(), time.units)
self.time = pd.DatetimeIndex(pd.Series(times), tz=self.location.tz) | def function[set_time, parameter[self, time]]:
constant[
Converts time data into a pandas date object.
Parameters
----------
time: netcdf
Contains time information.
Returns
-------
pandas.DatetimeIndex
]
variable[times] assign[=] call[name[num2date], parameter[call[call[name[time]][<ast.Slice object at 0x7da1b1bafc70>].squeeze, parameter[]], name[time].units]]
name[self].time assign[=] call[name[pd].DatetimeIndex, parameter[call[name[pd].Series, parameter[name[times]]]]] | keyword[def] identifier[set_time] ( identifier[self] , identifier[time] ):
literal[string]
identifier[times] = identifier[num2date] ( identifier[time] [:]. identifier[squeeze] (), identifier[time] . identifier[units] )
identifier[self] . identifier[time] = identifier[pd] . identifier[DatetimeIndex] ( identifier[pd] . identifier[Series] ( identifier[times] ), identifier[tz] = identifier[self] . identifier[location] . identifier[tz] ) | def set_time(self, time):
"""
Converts time data into a pandas date object.
Parameters
----------
time: netcdf
Contains time information.
Returns
-------
pandas.DatetimeIndex
"""
times = num2date(time[:].squeeze(), time.units)
self.time = pd.DatetimeIndex(pd.Series(times), tz=self.location.tz) |
def sign(self, data, b64=True):
"""Sign data with the private key and return the signed data.
The signed data will be Base64 encoded if b64 is True.
"""
padder = padding.PKCS1v15()
signer = self.private_key.signer(padder, None)
if not isinstance(data, six.binary_type):
data = data.encode('utf_8')
signer.update(data)
signed = signer.finalize()
if b64:
signed = base64.b64encode(signed)
return signed | def function[sign, parameter[self, data, b64]]:
constant[Sign data with the private key and return the signed data.
The signed data will be Base64 encoded if b64 is True.
]
variable[padder] assign[=] call[name[padding].PKCS1v15, parameter[]]
variable[signer] assign[=] call[name[self].private_key.signer, parameter[name[padder], constant[None]]]
if <ast.UnaryOp object at 0x7da1b23b3940> begin[:]
variable[data] assign[=] call[name[data].encode, parameter[constant[utf_8]]]
call[name[signer].update, parameter[name[data]]]
variable[signed] assign[=] call[name[signer].finalize, parameter[]]
if name[b64] begin[:]
variable[signed] assign[=] call[name[base64].b64encode, parameter[name[signed]]]
return[name[signed]] | keyword[def] identifier[sign] ( identifier[self] , identifier[data] , identifier[b64] = keyword[True] ):
literal[string]
identifier[padder] = identifier[padding] . identifier[PKCS1v15] ()
identifier[signer] = identifier[self] . identifier[private_key] . identifier[signer] ( identifier[padder] , keyword[None] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[six] . identifier[binary_type] ):
identifier[data] = identifier[data] . identifier[encode] ( literal[string] )
identifier[signer] . identifier[update] ( identifier[data] )
identifier[signed] = identifier[signer] . identifier[finalize] ()
keyword[if] identifier[b64] :
identifier[signed] = identifier[base64] . identifier[b64encode] ( identifier[signed] )
keyword[return] identifier[signed] | def sign(self, data, b64=True):
"""Sign data with the private key and return the signed data.
The signed data will be Base64 encoded if b64 is True.
"""
padder = padding.PKCS1v15()
signer = self.private_key.signer(padder, None)
if not isinstance(data, six.binary_type):
data = data.encode('utf_8') # depends on [control=['if'], data=[]]
signer.update(data)
signed = signer.finalize()
if b64:
signed = base64.b64encode(signed) # depends on [control=['if'], data=[]]
return signed |
def DeleteUserDefinedFunction(self, udf_link, options=None):
"""Deletes a user defined function.
:param str udf_link:
The link to the user defined function.
:param dict options:
The request options for the request.
:return:
The deleted UDF.
:rtype:
dict
"""
if options is None:
options = {}
path = base.GetPathFromLink(udf_link)
udf_id = base.GetResourceIdOrFullNameFromLink(udf_link)
return self.DeleteResource(path,
'udfs',
udf_id,
None,
options) | def function[DeleteUserDefinedFunction, parameter[self, udf_link, options]]:
constant[Deletes a user defined function.
:param str udf_link:
The link to the user defined function.
:param dict options:
The request options for the request.
:return:
The deleted UDF.
:rtype:
dict
]
if compare[name[options] is constant[None]] begin[:]
variable[options] assign[=] dictionary[[], []]
variable[path] assign[=] call[name[base].GetPathFromLink, parameter[name[udf_link]]]
variable[udf_id] assign[=] call[name[base].GetResourceIdOrFullNameFromLink, parameter[name[udf_link]]]
return[call[name[self].DeleteResource, parameter[name[path], constant[udfs], name[udf_id], constant[None], name[options]]]] | keyword[def] identifier[DeleteUserDefinedFunction] ( identifier[self] , identifier[udf_link] , identifier[options] = keyword[None] ):
literal[string]
keyword[if] identifier[options] keyword[is] keyword[None] :
identifier[options] ={}
identifier[path] = identifier[base] . identifier[GetPathFromLink] ( identifier[udf_link] )
identifier[udf_id] = identifier[base] . identifier[GetResourceIdOrFullNameFromLink] ( identifier[udf_link] )
keyword[return] identifier[self] . identifier[DeleteResource] ( identifier[path] ,
literal[string] ,
identifier[udf_id] ,
keyword[None] ,
identifier[options] ) | def DeleteUserDefinedFunction(self, udf_link, options=None):
"""Deletes a user defined function.
:param str udf_link:
The link to the user defined function.
:param dict options:
The request options for the request.
:return:
The deleted UDF.
:rtype:
dict
"""
if options is None:
options = {} # depends on [control=['if'], data=['options']]
path = base.GetPathFromLink(udf_link)
udf_id = base.GetResourceIdOrFullNameFromLink(udf_link)
return self.DeleteResource(path, 'udfs', udf_id, None, options) |
def sample(self):
"Return a random sample from the distribution."
if self.sampler is None:
self.sampler = weighted_sampler(self.dictionary.keys(),
self.dictionary.values())
return self.sampler() | def function[sample, parameter[self]]:
constant[Return a random sample from the distribution.]
if compare[name[self].sampler is constant[None]] begin[:]
name[self].sampler assign[=] call[name[weighted_sampler], parameter[call[name[self].dictionary.keys, parameter[]], call[name[self].dictionary.values, parameter[]]]]
return[call[name[self].sampler, parameter[]]] | keyword[def] identifier[sample] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[sampler] keyword[is] keyword[None] :
identifier[self] . identifier[sampler] = identifier[weighted_sampler] ( identifier[self] . identifier[dictionary] . identifier[keys] (),
identifier[self] . identifier[dictionary] . identifier[values] ())
keyword[return] identifier[self] . identifier[sampler] () | def sample(self):
"""Return a random sample from the distribution."""
if self.sampler is None:
self.sampler = weighted_sampler(self.dictionary.keys(), self.dictionary.values()) # depends on [control=['if'], data=[]]
return self.sampler() |
def sapm_effective_irradiance(self, poa_direct, poa_diffuse,
airmass_absolute, aoi,
reference_irradiance=1000):
"""
Use the :py:func:`sapm_effective_irradiance` function, the input
parameters, and ``self.module_parameters`` to calculate
effective irradiance.
Parameters
----------
poa_direct : numeric
The direct irradiance incident upon the module.
poa_diffuse : numeric
The diffuse irradiance incident on module.
airmass_absolute : numeric
Absolute airmass.
aoi : numeric
Angle of incidence in degrees.
reference_irradiance : numeric, default 1000
Reference irradiance by which to divide the input irradiance.
Returns
-------
effective_irradiance : numeric
The SAPM effective irradiance.
"""
return sapm_effective_irradiance(
poa_direct, poa_diffuse, airmass_absolute, aoi,
self.module_parameters, reference_irradiance=reference_irradiance) | def function[sapm_effective_irradiance, parameter[self, poa_direct, poa_diffuse, airmass_absolute, aoi, reference_irradiance]]:
constant[
Use the :py:func:`sapm_effective_irradiance` function, the input
parameters, and ``self.module_parameters`` to calculate
effective irradiance.
Parameters
----------
poa_direct : numeric
The direct irradiance incident upon the module.
poa_diffuse : numeric
The diffuse irradiance incident on module.
airmass_absolute : numeric
Absolute airmass.
aoi : numeric
Angle of incidence in degrees.
reference_irradiance : numeric, default 1000
Reference irradiance by which to divide the input irradiance.
Returns
-------
effective_irradiance : numeric
The SAPM effective irradiance.
]
return[call[name[sapm_effective_irradiance], parameter[name[poa_direct], name[poa_diffuse], name[airmass_absolute], name[aoi], name[self].module_parameters]]] | keyword[def] identifier[sapm_effective_irradiance] ( identifier[self] , identifier[poa_direct] , identifier[poa_diffuse] ,
identifier[airmass_absolute] , identifier[aoi] ,
identifier[reference_irradiance] = literal[int] ):
literal[string]
keyword[return] identifier[sapm_effective_irradiance] (
identifier[poa_direct] , identifier[poa_diffuse] , identifier[airmass_absolute] , identifier[aoi] ,
identifier[self] . identifier[module_parameters] , identifier[reference_irradiance] = identifier[reference_irradiance] ) | def sapm_effective_irradiance(self, poa_direct, poa_diffuse, airmass_absolute, aoi, reference_irradiance=1000):
"""
Use the :py:func:`sapm_effective_irradiance` function, the input
parameters, and ``self.module_parameters`` to calculate
effective irradiance.
Parameters
----------
poa_direct : numeric
The direct irradiance incident upon the module.
poa_diffuse : numeric
The diffuse irradiance incident on module.
airmass_absolute : numeric
Absolute airmass.
aoi : numeric
Angle of incidence in degrees.
reference_irradiance : numeric, default 1000
Reference irradiance by which to divide the input irradiance.
Returns
-------
effective_irradiance : numeric
The SAPM effective irradiance.
"""
return sapm_effective_irradiance(poa_direct, poa_diffuse, airmass_absolute, aoi, self.module_parameters, reference_irradiance=reference_irradiance) |
def Find(self, node_type, item_type):
'''
method for finding specific types of notation from nodes.
will currently return the first one it encounters because this method's only really intended
for some types of notation for which the exact value doesn't really
matter.
:param node_type: the type of node to look under
:param item_type: the type of item (notation) being searched for
:return: first item_type object encountered
'''
if node_type == OtherNodes.DirectionNode:
child = self.GetChild(len(self.children) - 1)
while child is not None and not isinstance(
child.GetItem(),
item_type):
if child.GetItem().__class__.__name__ == item_type.__name__:
return True
child = child.GetChild(0)
if node_type == OtherNodes.ExpressionNode:
child = self.GetChild(len(self.children) - 2)
while child is not None and not isinstance(
child.GetItem(),
item_type):
if child.GetItem().__class__.__name__ == item_type.__name__:
return True
child = child.GetChild(0) | def function[Find, parameter[self, node_type, item_type]]:
constant[
method for finding specific types of notation from nodes.
will currently return the first one it encounters because this method's only really intended
for some types of notation for which the exact value doesn't really
matter.
:param node_type: the type of node to look under
:param item_type: the type of item (notation) being searched for
:return: first item_type object encountered
]
if compare[name[node_type] equal[==] name[OtherNodes].DirectionNode] begin[:]
variable[child] assign[=] call[name[self].GetChild, parameter[binary_operation[call[name[len], parameter[name[self].children]] - constant[1]]]]
while <ast.BoolOp object at 0x7da1b235fe80> begin[:]
if compare[call[name[child].GetItem, parameter[]].__class__.__name__ equal[==] name[item_type].__name__] begin[:]
return[constant[True]]
variable[child] assign[=] call[name[child].GetChild, parameter[constant[0]]]
if compare[name[node_type] equal[==] name[OtherNodes].ExpressionNode] begin[:]
variable[child] assign[=] call[name[self].GetChild, parameter[binary_operation[call[name[len], parameter[name[self].children]] - constant[2]]]]
while <ast.BoolOp object at 0x7da1b235e440> begin[:]
if compare[call[name[child].GetItem, parameter[]].__class__.__name__ equal[==] name[item_type].__name__] begin[:]
return[constant[True]]
variable[child] assign[=] call[name[child].GetChild, parameter[constant[0]]] | keyword[def] identifier[Find] ( identifier[self] , identifier[node_type] , identifier[item_type] ):
literal[string]
keyword[if] identifier[node_type] == identifier[OtherNodes] . identifier[DirectionNode] :
identifier[child] = identifier[self] . identifier[GetChild] ( identifier[len] ( identifier[self] . identifier[children] )- literal[int] )
keyword[while] identifier[child] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] (
identifier[child] . identifier[GetItem] (),
identifier[item_type] ):
keyword[if] identifier[child] . identifier[GetItem] (). identifier[__class__] . identifier[__name__] == identifier[item_type] . identifier[__name__] :
keyword[return] keyword[True]
identifier[child] = identifier[child] . identifier[GetChild] ( literal[int] )
keyword[if] identifier[node_type] == identifier[OtherNodes] . identifier[ExpressionNode] :
identifier[child] = identifier[self] . identifier[GetChild] ( identifier[len] ( identifier[self] . identifier[children] )- literal[int] )
keyword[while] identifier[child] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] (
identifier[child] . identifier[GetItem] (),
identifier[item_type] ):
keyword[if] identifier[child] . identifier[GetItem] (). identifier[__class__] . identifier[__name__] == identifier[item_type] . identifier[__name__] :
keyword[return] keyword[True]
identifier[child] = identifier[child] . identifier[GetChild] ( literal[int] ) | def Find(self, node_type, item_type):
"""
method for finding specific types of notation from nodes.
will currently return the first one it encounters because this method's only really intended
for some types of notation for which the exact value doesn't really
matter.
:param node_type: the type of node to look under
:param item_type: the type of item (notation) being searched for
:return: first item_type object encountered
"""
if node_type == OtherNodes.DirectionNode:
child = self.GetChild(len(self.children) - 1)
while child is not None and (not isinstance(child.GetItem(), item_type)):
if child.GetItem().__class__.__name__ == item_type.__name__:
return True # depends on [control=['if'], data=[]]
child = child.GetChild(0) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
if node_type == OtherNodes.ExpressionNode:
child = self.GetChild(len(self.children) - 2)
while child is not None and (not isinstance(child.GetItem(), item_type)):
if child.GetItem().__class__.__name__ == item_type.__name__:
return True # depends on [control=['if'], data=[]]
child = child.GetChild(0) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] |
def som_get_capture_objects(som_pointer):
"""!
@brief Returns list of indexes of captured objects by each neuron.
@param[in] som_pointer (c_pointer): pointer to object of self-organized map.
"""
ccore = ccore_library.get()
ccore.som_get_capture_objects.restype = POINTER(pyclustering_package)
package = ccore.som_get_capture_objects(som_pointer)
result = package_extractor(package).extract()
return result | def function[som_get_capture_objects, parameter[som_pointer]]:
constant[!
@brief Returns list of indexes of captured objects by each neuron.
@param[in] som_pointer (c_pointer): pointer to object of self-organized map.
]
variable[ccore] assign[=] call[name[ccore_library].get, parameter[]]
name[ccore].som_get_capture_objects.restype assign[=] call[name[POINTER], parameter[name[pyclustering_package]]]
variable[package] assign[=] call[name[ccore].som_get_capture_objects, parameter[name[som_pointer]]]
variable[result] assign[=] call[call[name[package_extractor], parameter[name[package]]].extract, parameter[]]
return[name[result]] | keyword[def] identifier[som_get_capture_objects] ( identifier[som_pointer] ):
literal[string]
identifier[ccore] = identifier[ccore_library] . identifier[get] ()
identifier[ccore] . identifier[som_get_capture_objects] . identifier[restype] = identifier[POINTER] ( identifier[pyclustering_package] )
identifier[package] = identifier[ccore] . identifier[som_get_capture_objects] ( identifier[som_pointer] )
identifier[result] = identifier[package_extractor] ( identifier[package] ). identifier[extract] ()
keyword[return] identifier[result] | def som_get_capture_objects(som_pointer):
"""!
@brief Returns list of indexes of captured objects by each neuron.
@param[in] som_pointer (c_pointer): pointer to object of self-organized map.
"""
ccore = ccore_library.get()
ccore.som_get_capture_objects.restype = POINTER(pyclustering_package)
package = ccore.som_get_capture_objects(som_pointer)
result = package_extractor(package).extract()
return result |
def export_for_schema(self):
"""
Returns a string version of these replication options which are
suitable for use in a CREATE KEYSPACE statement.
"""
if self.options_map:
return dict((str(key), str(value)) for key, value in self.options_map.items())
return "{'class': '%s'}" % (self.name, ) | def function[export_for_schema, parameter[self]]:
constant[
Returns a string version of these replication options which are
suitable for use in a CREATE KEYSPACE statement.
]
if name[self].options_map begin[:]
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da2054a7100>]]]
return[binary_operation[constant[{'class': '%s'}] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6aaf80>]]]] | keyword[def] identifier[export_for_schema] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[options_map] :
keyword[return] identifier[dict] (( identifier[str] ( identifier[key] ), identifier[str] ( identifier[value] )) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[options_map] . identifier[items] ())
keyword[return] literal[string] %( identifier[self] . identifier[name] ,) | def export_for_schema(self):
"""
Returns a string version of these replication options which are
suitable for use in a CREATE KEYSPACE statement.
"""
if self.options_map:
return dict(((str(key), str(value)) for (key, value) in self.options_map.items())) # depends on [control=['if'], data=[]]
return "{'class': '%s'}" % (self.name,) |
def mark_dead(self, connection, now=None):
"""
Mark the connection as dead (failed). Remove it from the live pool and
put it on a timeout.
:arg connection: the failed instance
"""
# allow inject for testing purposes
now = now if now else time.time()
try:
self.connections.remove(connection)
except ValueError:
# connection not alive or another thread marked it already, ignore
return
else:
dead_count = self.dead_count.get(connection, 0) + 1
self.dead_count[connection] = dead_count
timeout = self.dead_timeout * 2 ** min(dead_count - 1,
self.timeout_cutoff)
self.dead.put((now + timeout, connection))
logger.warning(
'Connection %r has failed for %i times in a row,'
' putting on %i second timeout.',
connection, dead_count, timeout
) | def function[mark_dead, parameter[self, connection, now]]:
constant[
Mark the connection as dead (failed). Remove it from the live pool and
put it on a timeout.
:arg connection: the failed instance
]
variable[now] assign[=] <ast.IfExp object at 0x7da20e956e00>
<ast.Try object at 0x7da20e957f10> | keyword[def] identifier[mark_dead] ( identifier[self] , identifier[connection] , identifier[now] = keyword[None] ):
literal[string]
identifier[now] = identifier[now] keyword[if] identifier[now] keyword[else] identifier[time] . identifier[time] ()
keyword[try] :
identifier[self] . identifier[connections] . identifier[remove] ( identifier[connection] )
keyword[except] identifier[ValueError] :
keyword[return]
keyword[else] :
identifier[dead_count] = identifier[self] . identifier[dead_count] . identifier[get] ( identifier[connection] , literal[int] )+ literal[int]
identifier[self] . identifier[dead_count] [ identifier[connection] ]= identifier[dead_count]
identifier[timeout] = identifier[self] . identifier[dead_timeout] * literal[int] ** identifier[min] ( identifier[dead_count] - literal[int] ,
identifier[self] . identifier[timeout_cutoff] )
identifier[self] . identifier[dead] . identifier[put] (( identifier[now] + identifier[timeout] , identifier[connection] ))
identifier[logger] . identifier[warning] (
literal[string]
literal[string] ,
identifier[connection] , identifier[dead_count] , identifier[timeout]
) | def mark_dead(self, connection, now=None):
"""
Mark the connection as dead (failed). Remove it from the live pool and
put it on a timeout.
:arg connection: the failed instance
"""
# allow inject for testing purposes
now = now if now else time.time()
try:
self.connections.remove(connection) # depends on [control=['try'], data=[]]
except ValueError:
# connection not alive or another thread marked it already, ignore
return # depends on [control=['except'], data=[]]
else:
dead_count = self.dead_count.get(connection, 0) + 1
self.dead_count[connection] = dead_count
timeout = self.dead_timeout * 2 ** min(dead_count - 1, self.timeout_cutoff)
self.dead.put((now + timeout, connection))
logger.warning('Connection %r has failed for %i times in a row, putting on %i second timeout.', connection, dead_count, timeout) |
def create(name='local'):
"""Creates a new KVStore.
For single machine training, there are two commonly used types:
``local``: Copies all gradients to CPU memory and updates weights there.
``device``: Aggregates gradients and updates weights on GPUs. With this setting,
the KVStore also attempts to use GPU peer-to-peer communication,
potentially accelerating the communication.
For distributed training, KVStore also supports a number of types:
``dist_sync``: Behaves similarly to ``local`` but with one major difference.
With ``dist_sync``, batch-size now means the batch size used on each machine.
So if there are ``n`` machines and we use batch size ``b``,
then ``dist_sync`` behaves like ``local`` with batch size ``n * b``.
``dist_device_sync``: Identical to ``dist_sync`` with the difference similar
to ``device`` vs ``local``.
``dist_async``: Performs asynchronous updates.
The weights are updated whenever gradients are received from any machine.
No two updates happen on the same weight at the same time. However, the order is not
guaranteed.
Parameters
----------
name : {'local', 'device', 'nccl', 'dist_sync', 'dist_device_sync', 'dist_async'}
The type of KVStore.
Returns
-------
kv : KVStore
The created KVStore.
"""
if not isinstance(name, string_types):
raise TypeError('name must be a string')
handle = KVStoreHandle()
check_call(_LIB.MXKVStoreCreate(c_str(name),
ctypes.byref(handle)))
kv = KVStore(handle)
set_kvstore_handle(kv.handle)
return kv | def function[create, parameter[name]]:
constant[Creates a new KVStore.
For single machine training, there are two commonly used types:
``local``: Copies all gradients to CPU memory and updates weights there.
``device``: Aggregates gradients and updates weights on GPUs. With this setting,
the KVStore also attempts to use GPU peer-to-peer communication,
potentially accelerating the communication.
For distributed training, KVStore also supports a number of types:
``dist_sync``: Behaves similarly to ``local`` but with one major difference.
With ``dist_sync``, batch-size now means the batch size used on each machine.
So if there are ``n`` machines and we use batch size ``b``,
then ``dist_sync`` behaves like ``local`` with batch size ``n * b``.
``dist_device_sync``: Identical to ``dist_sync`` with the difference similar
to ``device`` vs ``local``.
``dist_async``: Performs asynchronous updates.
The weights are updated whenever gradients are received from any machine.
No two updates happen on the same weight at the same time. However, the order is not
guaranteed.
Parameters
----------
name : {'local', 'device', 'nccl', 'dist_sync', 'dist_device_sync', 'dist_async'}
The type of KVStore.
Returns
-------
kv : KVStore
The created KVStore.
]
if <ast.UnaryOp object at 0x7da18fe93af0> begin[:]
<ast.Raise object at 0x7da18fe92350>
variable[handle] assign[=] call[name[KVStoreHandle], parameter[]]
call[name[check_call], parameter[call[name[_LIB].MXKVStoreCreate, parameter[call[name[c_str], parameter[name[name]]], call[name[ctypes].byref, parameter[name[handle]]]]]]]
variable[kv] assign[=] call[name[KVStore], parameter[name[handle]]]
call[name[set_kvstore_handle], parameter[name[kv].handle]]
return[name[kv]] | keyword[def] identifier[create] ( identifier[name] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[name] , identifier[string_types] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[handle] = identifier[KVStoreHandle] ()
identifier[check_call] ( identifier[_LIB] . identifier[MXKVStoreCreate] ( identifier[c_str] ( identifier[name] ),
identifier[ctypes] . identifier[byref] ( identifier[handle] )))
identifier[kv] = identifier[KVStore] ( identifier[handle] )
identifier[set_kvstore_handle] ( identifier[kv] . identifier[handle] )
keyword[return] identifier[kv] | def create(name='local'):
"""Creates a new KVStore.
For single machine training, there are two commonly used types:
``local``: Copies all gradients to CPU memory and updates weights there.
``device``: Aggregates gradients and updates weights on GPUs. With this setting,
the KVStore also attempts to use GPU peer-to-peer communication,
potentially accelerating the communication.
For distributed training, KVStore also supports a number of types:
``dist_sync``: Behaves similarly to ``local`` but with one major difference.
With ``dist_sync``, batch-size now means the batch size used on each machine.
So if there are ``n`` machines and we use batch size ``b``,
then ``dist_sync`` behaves like ``local`` with batch size ``n * b``.
``dist_device_sync``: Identical to ``dist_sync`` with the difference similar
to ``device`` vs ``local``.
``dist_async``: Performs asynchronous updates.
The weights are updated whenever gradients are received from any machine.
No two updates happen on the same weight at the same time. However, the order is not
guaranteed.
Parameters
----------
name : {'local', 'device', 'nccl', 'dist_sync', 'dist_device_sync', 'dist_async'}
The type of KVStore.
Returns
-------
kv : KVStore
The created KVStore.
"""
if not isinstance(name, string_types):
raise TypeError('name must be a string') # depends on [control=['if'], data=[]]
handle = KVStoreHandle()
check_call(_LIB.MXKVStoreCreate(c_str(name), ctypes.byref(handle)))
kv = KVStore(handle)
set_kvstore_handle(kv.handle)
return kv |
def sensor_bias_encode(self, axBias, ayBias, azBias, gxBias, gyBias, gzBias):
'''
Accelerometer and gyro biases.
axBias : Accelerometer X bias (m/s) (float)
ayBias : Accelerometer Y bias (m/s) (float)
azBias : Accelerometer Z bias (m/s) (float)
gxBias : Gyro X bias (rad/s) (float)
gyBias : Gyro Y bias (rad/s) (float)
gzBias : Gyro Z bias (rad/s) (float)
'''
return MAVLink_sensor_bias_message(axBias, ayBias, azBias, gxBias, gyBias, gzBias) | def function[sensor_bias_encode, parameter[self, axBias, ayBias, azBias, gxBias, gyBias, gzBias]]:
constant[
Accelerometer and gyro biases.
axBias : Accelerometer X bias (m/s) (float)
ayBias : Accelerometer Y bias (m/s) (float)
azBias : Accelerometer Z bias (m/s) (float)
gxBias : Gyro X bias (rad/s) (float)
gyBias : Gyro Y bias (rad/s) (float)
gzBias : Gyro Z bias (rad/s) (float)
]
return[call[name[MAVLink_sensor_bias_message], parameter[name[axBias], name[ayBias], name[azBias], name[gxBias], name[gyBias], name[gzBias]]]] | keyword[def] identifier[sensor_bias_encode] ( identifier[self] , identifier[axBias] , identifier[ayBias] , identifier[azBias] , identifier[gxBias] , identifier[gyBias] , identifier[gzBias] ):
literal[string]
keyword[return] identifier[MAVLink_sensor_bias_message] ( identifier[axBias] , identifier[ayBias] , identifier[azBias] , identifier[gxBias] , identifier[gyBias] , identifier[gzBias] ) | def sensor_bias_encode(self, axBias, ayBias, azBias, gxBias, gyBias, gzBias):
"""
Accelerometer and gyro biases.
axBias : Accelerometer X bias (m/s) (float)
ayBias : Accelerometer Y bias (m/s) (float)
azBias : Accelerometer Z bias (m/s) (float)
gxBias : Gyro X bias (rad/s) (float)
gyBias : Gyro Y bias (rad/s) (float)
gzBias : Gyro Z bias (rad/s) (float)
"""
return MAVLink_sensor_bias_message(axBias, ayBias, azBias, gxBias, gyBias, gzBias) |
def perform_put(self, path, body, x_ms_version=None):
'''
Performs a PUT request and returns the response.
path:
Path to the resource.
Ex: '/<subscription-id>/services/hostedservices/<service-name>'
body:
Body for the PUT request.
x_ms_version:
If specified, this is used for the x-ms-version header.
Otherwise, self.x_ms_version is used.
'''
request = HTTPRequest()
request.method = 'PUT'
request.host = self.host
request.path = path
request.body = _get_request_body(body)
request.path, request.query = self._httpclient._update_request_uri_query(request)
request.headers = self._update_management_header(request, x_ms_version)
response = self._perform_request(request)
return response | def function[perform_put, parameter[self, path, body, x_ms_version]]:
constant[
Performs a PUT request and returns the response.
path:
Path to the resource.
Ex: '/<subscription-id>/services/hostedservices/<service-name>'
body:
Body for the PUT request.
x_ms_version:
If specified, this is used for the x-ms-version header.
Otherwise, self.x_ms_version is used.
]
variable[request] assign[=] call[name[HTTPRequest], parameter[]]
name[request].method assign[=] constant[PUT]
name[request].host assign[=] name[self].host
name[request].path assign[=] name[path]
name[request].body assign[=] call[name[_get_request_body], parameter[name[body]]]
<ast.Tuple object at 0x7da2054a4ca0> assign[=] call[name[self]._httpclient._update_request_uri_query, parameter[name[request]]]
name[request].headers assign[=] call[name[self]._update_management_header, parameter[name[request], name[x_ms_version]]]
variable[response] assign[=] call[name[self]._perform_request, parameter[name[request]]]
return[name[response]] | keyword[def] identifier[perform_put] ( identifier[self] , identifier[path] , identifier[body] , identifier[x_ms_version] = keyword[None] ):
literal[string]
identifier[request] = identifier[HTTPRequest] ()
identifier[request] . identifier[method] = literal[string]
identifier[request] . identifier[host] = identifier[self] . identifier[host]
identifier[request] . identifier[path] = identifier[path]
identifier[request] . identifier[body] = identifier[_get_request_body] ( identifier[body] )
identifier[request] . identifier[path] , identifier[request] . identifier[query] = identifier[self] . identifier[_httpclient] . identifier[_update_request_uri_query] ( identifier[request] )
identifier[request] . identifier[headers] = identifier[self] . identifier[_update_management_header] ( identifier[request] , identifier[x_ms_version] )
identifier[response] = identifier[self] . identifier[_perform_request] ( identifier[request] )
keyword[return] identifier[response] | def perform_put(self, path, body, x_ms_version=None):
"""
Performs a PUT request and returns the response.
path:
Path to the resource.
Ex: '/<subscription-id>/services/hostedservices/<service-name>'
body:
Body for the PUT request.
x_ms_version:
If specified, this is used for the x-ms-version header.
Otherwise, self.x_ms_version is used.
"""
request = HTTPRequest()
request.method = 'PUT'
request.host = self.host
request.path = path
request.body = _get_request_body(body)
(request.path, request.query) = self._httpclient._update_request_uri_query(request)
request.headers = self._update_management_header(request, x_ms_version)
response = self._perform_request(request)
return response |
def get_os_filename (path):
"""Return filesystem path for given URL path."""
if os.name == 'nt':
path = prepare_urlpath_for_nt(path)
res = urllib.url2pathname(fileutil.pathencode(path))
if os.name == 'nt' and res.endswith(':') and len(res) == 2:
# Work around http://bugs.python.org/issue11474
res += os.sep
return res | def function[get_os_filename, parameter[path]]:
constant[Return filesystem path for given URL path.]
if compare[name[os].name equal[==] constant[nt]] begin[:]
variable[path] assign[=] call[name[prepare_urlpath_for_nt], parameter[name[path]]]
variable[res] assign[=] call[name[urllib].url2pathname, parameter[call[name[fileutil].pathencode, parameter[name[path]]]]]
if <ast.BoolOp object at 0x7da18fe93d30> begin[:]
<ast.AugAssign object at 0x7da18fe91d50>
return[name[res]] | keyword[def] identifier[get_os_filename] ( identifier[path] ):
literal[string]
keyword[if] identifier[os] . identifier[name] == literal[string] :
identifier[path] = identifier[prepare_urlpath_for_nt] ( identifier[path] )
identifier[res] = identifier[urllib] . identifier[url2pathname] ( identifier[fileutil] . identifier[pathencode] ( identifier[path] ))
keyword[if] identifier[os] . identifier[name] == literal[string] keyword[and] identifier[res] . identifier[endswith] ( literal[string] ) keyword[and] identifier[len] ( identifier[res] )== literal[int] :
identifier[res] += identifier[os] . identifier[sep]
keyword[return] identifier[res] | def get_os_filename(path):
"""Return filesystem path for given URL path."""
if os.name == 'nt':
path = prepare_urlpath_for_nt(path) # depends on [control=['if'], data=[]]
res = urllib.url2pathname(fileutil.pathencode(path))
if os.name == 'nt' and res.endswith(':') and (len(res) == 2):
# Work around http://bugs.python.org/issue11474
res += os.sep # depends on [control=['if'], data=[]]
return res |
def gfrefn(t1, t2, s1, s2):
"""
For those times when we can't do better, we use a bisection
method to find the next time at which to test for state change.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfrefn_c.html
:param t1: One of two values bracketing a state change.
:type t1: float
:param t2: The other value that brackets a state change.
:type t2: float
:param s1: State at t1.
:type s1: bool
:param s2: State at t2.
:type s2: bool
:return: New value at which to check for transition.
:rtype: float
"""
t1 = ctypes.c_double(t1)
t2 = ctypes.c_double(t2)
s1 = ctypes.c_int(s1)
s2 = ctypes.c_int(s2)
t = ctypes.c_double()
libspice.gfrefn_c(t1, t2, s1, s2, ctypes.byref(t))
return t.value | def function[gfrefn, parameter[t1, t2, s1, s2]]:
constant[
For those times when we can't do better, we use a bisection
method to find the next time at which to test for state change.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfrefn_c.html
:param t1: One of two values bracketing a state change.
:type t1: float
:param t2: The other value that brackets a state change.
:type t2: float
:param s1: State at t1.
:type s1: bool
:param s2: State at t2.
:type s2: bool
:return: New value at which to check for transition.
:rtype: float
]
variable[t1] assign[=] call[name[ctypes].c_double, parameter[name[t1]]]
variable[t2] assign[=] call[name[ctypes].c_double, parameter[name[t2]]]
variable[s1] assign[=] call[name[ctypes].c_int, parameter[name[s1]]]
variable[s2] assign[=] call[name[ctypes].c_int, parameter[name[s2]]]
variable[t] assign[=] call[name[ctypes].c_double, parameter[]]
call[name[libspice].gfrefn_c, parameter[name[t1], name[t2], name[s1], name[s2], call[name[ctypes].byref, parameter[name[t]]]]]
return[name[t].value] | keyword[def] identifier[gfrefn] ( identifier[t1] , identifier[t2] , identifier[s1] , identifier[s2] ):
literal[string]
identifier[t1] = identifier[ctypes] . identifier[c_double] ( identifier[t1] )
identifier[t2] = identifier[ctypes] . identifier[c_double] ( identifier[t2] )
identifier[s1] = identifier[ctypes] . identifier[c_int] ( identifier[s1] )
identifier[s2] = identifier[ctypes] . identifier[c_int] ( identifier[s2] )
identifier[t] = identifier[ctypes] . identifier[c_double] ()
identifier[libspice] . identifier[gfrefn_c] ( identifier[t1] , identifier[t2] , identifier[s1] , identifier[s2] , identifier[ctypes] . identifier[byref] ( identifier[t] ))
keyword[return] identifier[t] . identifier[value] | def gfrefn(t1, t2, s1, s2):
"""
For those times when we can't do better, we use a bisection
method to find the next time at which to test for state change.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfrefn_c.html
:param t1: One of two values bracketing a state change.
:type t1: float
:param t2: The other value that brackets a state change.
:type t2: float
:param s1: State at t1.
:type s1: bool
:param s2: State at t2.
:type s2: bool
:return: New value at which to check for transition.
:rtype: float
"""
t1 = ctypes.c_double(t1)
t2 = ctypes.c_double(t2)
s1 = ctypes.c_int(s1)
s2 = ctypes.c_int(s2)
t = ctypes.c_double()
libspice.gfrefn_c(t1, t2, s1, s2, ctypes.byref(t))
return t.value |
def parent_callback(self, parent_fu):
"""Callback from executor future to update the parent.
Args:
- parent_fu (Future): Future returned by the executor along with callback
Returns:
- None
Updates the super() with the result() or exception()
"""
if parent_fu.done() is True:
e = parent_fu._exception
if e:
super().set_exception(e)
else:
super().set_result(self.file_obj)
return | def function[parent_callback, parameter[self, parent_fu]]:
constant[Callback from executor future to update the parent.
Args:
- parent_fu (Future): Future returned by the executor along with callback
Returns:
- None
Updates the super() with the result() or exception()
]
if compare[call[name[parent_fu].done, parameter[]] is constant[True]] begin[:]
variable[e] assign[=] name[parent_fu]._exception
if name[e] begin[:]
call[call[name[super], parameter[]].set_exception, parameter[name[e]]]
return[None] | keyword[def] identifier[parent_callback] ( identifier[self] , identifier[parent_fu] ):
literal[string]
keyword[if] identifier[parent_fu] . identifier[done] () keyword[is] keyword[True] :
identifier[e] = identifier[parent_fu] . identifier[_exception]
keyword[if] identifier[e] :
identifier[super] (). identifier[set_exception] ( identifier[e] )
keyword[else] :
identifier[super] (). identifier[set_result] ( identifier[self] . identifier[file_obj] )
keyword[return] | def parent_callback(self, parent_fu):
"""Callback from executor future to update the parent.
Args:
- parent_fu (Future): Future returned by the executor along with callback
Returns:
- None
Updates the super() with the result() or exception()
"""
if parent_fu.done() is True:
e = parent_fu._exception
if e:
super().set_exception(e) # depends on [control=['if'], data=[]]
else:
super().set_result(self.file_obj) # depends on [control=['if'], data=[]]
return |
def detectSymbianOS(self):
"""Return detection of SymbianOS
Detects if the current device is any Symbian OS-based device,
including older S60, Series 70, Series 80, Series 90, and UIQ,
or other browsers running on these devices.
"""
return UAgentInfo.deviceSymbian in self.__userAgent \
or UAgentInfo.deviceS60 in self.__userAgent \
or UAgentInfo.deviceS70 in self.__userAgent \
or UAgentInfo.deviceS80 in self.__userAgent \
or UAgentInfo.deviceS90 in self.__userAgent | def function[detectSymbianOS, parameter[self]]:
constant[Return detection of SymbianOS
Detects if the current device is any Symbian OS-based device,
including older S60, Series 70, Series 80, Series 90, and UIQ,
or other browsers running on these devices.
]
return[<ast.BoolOp object at 0x7da1b0aa5e70>] | keyword[def] identifier[detectSymbianOS] ( identifier[self] ):
literal[string]
keyword[return] identifier[UAgentInfo] . identifier[deviceSymbian] keyword[in] identifier[self] . identifier[__userAgent] keyword[or] identifier[UAgentInfo] . identifier[deviceS60] keyword[in] identifier[self] . identifier[__userAgent] keyword[or] identifier[UAgentInfo] . identifier[deviceS70] keyword[in] identifier[self] . identifier[__userAgent] keyword[or] identifier[UAgentInfo] . identifier[deviceS80] keyword[in] identifier[self] . identifier[__userAgent] keyword[or] identifier[UAgentInfo] . identifier[deviceS90] keyword[in] identifier[self] . identifier[__userAgent] | def detectSymbianOS(self):
"""Return detection of SymbianOS
Detects if the current device is any Symbian OS-based device,
including older S60, Series 70, Series 80, Series 90, and UIQ,
or other browsers running on these devices.
"""
return UAgentInfo.deviceSymbian in self.__userAgent or UAgentInfo.deviceS60 in self.__userAgent or UAgentInfo.deviceS70 in self.__userAgent or (UAgentInfo.deviceS80 in self.__userAgent) or (UAgentInfo.deviceS90 in self.__userAgent) |
def send_audio_file(
self, audio_file, device_state, authentication_headers,
dialog_request_id, distance_profile, audio_format
):
"""
Send audio to AVS
The file-like object are steaming uploaded for improved latency.
Returns:
bytes -- wav audio bytes returned from AVS
"""
payload = {
'context': device_state,
'event': {
'header': {
'namespace': 'SpeechRecognizer',
'name': 'Recognize',
'messageId': self.generate_message_id(),
'dialogRequestId': dialog_request_id,
},
'payload': {
'profile': distance_profile,
'format': audio_format
}
}
}
multipart_data = MultipartEncoder(
fields=[
(
'request', (
'request',
json.dumps(payload),
'application/json;',
{'Content-Disposition': "form-data; name='request'"}
),
),
(
'audio', (
'audio',
audio_file,
'application/octet-stream',
{'Content-Disposition': "form-data; name='audio'"}
)
),
],
boundary='boundary',
)
headers = {
**authentication_headers,
'Content-Type': multipart_data.content_type
}
stream_id = self.connection.request(
'POST',
'/v20160207/events',
headers=headers,
body=multipart_data,
)
response = self.connection.get_response(stream_id)
return self.parse_response(response) | def function[send_audio_file, parameter[self, audio_file, device_state, authentication_headers, dialog_request_id, distance_profile, audio_format]]:
constant[
Send audio to AVS
The file-like object are steaming uploaded for improved latency.
Returns:
bytes -- wav audio bytes returned from AVS
]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da18f720970>, <ast.Constant object at 0x7da18f7203d0>], [<ast.Name object at 0x7da18f7231c0>, <ast.Dict object at 0x7da18f722500>]]
variable[multipart_data] assign[=] call[name[MultipartEncoder], parameter[]]
variable[headers] assign[=] dictionary[[None, <ast.Constant object at 0x7da20c6c51b0>], [<ast.Name object at 0x7da20c6c7b50>, <ast.Attribute object at 0x7da20c6c4100>]]
variable[stream_id] assign[=] call[name[self].connection.request, parameter[constant[POST], constant[/v20160207/events]]]
variable[response] assign[=] call[name[self].connection.get_response, parameter[name[stream_id]]]
return[call[name[self].parse_response, parameter[name[response]]]] | keyword[def] identifier[send_audio_file] (
identifier[self] , identifier[audio_file] , identifier[device_state] , identifier[authentication_headers] ,
identifier[dialog_request_id] , identifier[distance_profile] , identifier[audio_format]
):
literal[string]
identifier[payload] ={
literal[string] : identifier[device_state] ,
literal[string] :{
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[self] . identifier[generate_message_id] (),
literal[string] : identifier[dialog_request_id] ,
},
literal[string] :{
literal[string] : identifier[distance_profile] ,
literal[string] : identifier[audio_format]
}
}
}
identifier[multipart_data] = identifier[MultipartEncoder] (
identifier[fields] =[
(
literal[string] ,(
literal[string] ,
identifier[json] . identifier[dumps] ( identifier[payload] ),
literal[string] ,
{ literal[string] : literal[string] }
),
),
(
literal[string] ,(
literal[string] ,
identifier[audio_file] ,
literal[string] ,
{ literal[string] : literal[string] }
)
),
],
identifier[boundary] = literal[string] ,
)
identifier[headers] ={
** identifier[authentication_headers] ,
literal[string] : identifier[multipart_data] . identifier[content_type]
}
identifier[stream_id] = identifier[self] . identifier[connection] . identifier[request] (
literal[string] ,
literal[string] ,
identifier[headers] = identifier[headers] ,
identifier[body] = identifier[multipart_data] ,
)
identifier[response] = identifier[self] . identifier[connection] . identifier[get_response] ( identifier[stream_id] )
keyword[return] identifier[self] . identifier[parse_response] ( identifier[response] ) | def send_audio_file(self, audio_file, device_state, authentication_headers, dialog_request_id, distance_profile, audio_format):
"""
Send audio to AVS
The file-like object are steaming uploaded for improved latency.
Returns:
bytes -- wav audio bytes returned from AVS
"""
payload = {'context': device_state, 'event': {'header': {'namespace': 'SpeechRecognizer', 'name': 'Recognize', 'messageId': self.generate_message_id(), 'dialogRequestId': dialog_request_id}, 'payload': {'profile': distance_profile, 'format': audio_format}}}
multipart_data = MultipartEncoder(fields=[('request', ('request', json.dumps(payload), 'application/json;', {'Content-Disposition': "form-data; name='request'"})), ('audio', ('audio', audio_file, 'application/octet-stream', {'Content-Disposition': "form-data; name='audio'"}))], boundary='boundary')
headers = {**authentication_headers, 'Content-Type': multipart_data.content_type}
stream_id = self.connection.request('POST', '/v20160207/events', headers=headers, body=multipart_data)
response = self.connection.get_response(stream_id)
return self.parse_response(response) |
def preprocess_text(text_string, function_list):
'''
Given each function within function_list, applies the order of functions put forward onto
text_string, returning the processed string as type str.
Keyword argument:
- function_list: list of functions available in preprocessing.text
- text_string: string instance
Exceptions raised:
- FunctionError: occurs should an invalid function be passed within the list of functions
- InputError: occurs should text_string be non-string, or function_list be non-list
'''
if text_string is None or text_string == "":
return ""
elif isinstance(text_string, str):
if isinstance(function_list, list):
for func in function_list:
try:
text_string = func(text_string)
except (NameError, TypeError):
raise FunctionError("invalid function passed as element of function_list")
except:
raise
return text_string
else:
raise InputError("list of functions not passed as argument for function_list")
else:
raise InputError("string not passed as argument for text_string") | def function[preprocess_text, parameter[text_string, function_list]]:
constant[
Given each function within function_list, applies the order of functions put forward onto
text_string, returning the processed string as type str.
Keyword argument:
- function_list: list of functions available in preprocessing.text
- text_string: string instance
Exceptions raised:
- FunctionError: occurs should an invalid function be passed within the list of functions
- InputError: occurs should text_string be non-string, or function_list be non-list
]
if <ast.BoolOp object at 0x7da2043456c0> begin[:]
return[constant[]] | keyword[def] identifier[preprocess_text] ( identifier[text_string] , identifier[function_list] ):
literal[string]
keyword[if] identifier[text_string] keyword[is] keyword[None] keyword[or] identifier[text_string] == literal[string] :
keyword[return] literal[string]
keyword[elif] identifier[isinstance] ( identifier[text_string] , identifier[str] ):
keyword[if] identifier[isinstance] ( identifier[function_list] , identifier[list] ):
keyword[for] identifier[func] keyword[in] identifier[function_list] :
keyword[try] :
identifier[text_string] = identifier[func] ( identifier[text_string] )
keyword[except] ( identifier[NameError] , identifier[TypeError] ):
keyword[raise] identifier[FunctionError] ( literal[string] )
keyword[except] :
keyword[raise]
keyword[return] identifier[text_string]
keyword[else] :
keyword[raise] identifier[InputError] ( literal[string] )
keyword[else] :
keyword[raise] identifier[InputError] ( literal[string] ) | def preprocess_text(text_string, function_list):
"""
Given each function within function_list, applies the order of functions put forward onto
text_string, returning the processed string as type str.
Keyword argument:
- function_list: list of functions available in preprocessing.text
- text_string: string instance
Exceptions raised:
- FunctionError: occurs should an invalid function be passed within the list of functions
- InputError: occurs should text_string be non-string, or function_list be non-list
"""
if text_string is None or text_string == '':
return '' # depends on [control=['if'], data=[]]
elif isinstance(text_string, str):
if isinstance(function_list, list):
for func in function_list:
try:
text_string = func(text_string) # depends on [control=['try'], data=[]]
except (NameError, TypeError):
raise FunctionError('invalid function passed as element of function_list') # depends on [control=['except'], data=[]]
except:
raise # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['func']]
return text_string # depends on [control=['if'], data=[]]
else:
raise InputError('list of functions not passed as argument for function_list') # depends on [control=['if'], data=[]]
else:
raise InputError('string not passed as argument for text_string') |
def ntoreturn(self):
"""Extract ntoreturn counter if available (lazy)."""
if not self._counters_calculated:
self._counters_calculated = True
self._extract_counters()
return self._ntoreturn | def function[ntoreturn, parameter[self]]:
constant[Extract ntoreturn counter if available (lazy).]
if <ast.UnaryOp object at 0x7da1b1782920> begin[:]
name[self]._counters_calculated assign[=] constant[True]
call[name[self]._extract_counters, parameter[]]
return[name[self]._ntoreturn] | keyword[def] identifier[ntoreturn] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_counters_calculated] :
identifier[self] . identifier[_counters_calculated] = keyword[True]
identifier[self] . identifier[_extract_counters] ()
keyword[return] identifier[self] . identifier[_ntoreturn] | def ntoreturn(self):
"""Extract ntoreturn counter if available (lazy)."""
if not self._counters_calculated:
self._counters_calculated = True
self._extract_counters() # depends on [control=['if'], data=[]]
return self._ntoreturn |
def get_achievements(self, name):
"""Get achievements for a player.
Must match on name, not index, since order is not always the same.
"""
postgame = self.get_postgame()
if not postgame:
return None
for achievements in postgame.achievements:
# achievements player name can be shorter
if name.startswith(achievements.player_name.replace(b'\x00', b'')):
return achievements
return None | def function[get_achievements, parameter[self, name]]:
constant[Get achievements for a player.
Must match on name, not index, since order is not always the same.
]
variable[postgame] assign[=] call[name[self].get_postgame, parameter[]]
if <ast.UnaryOp object at 0x7da1b25ee6e0> begin[:]
return[constant[None]]
for taget[name[achievements]] in starred[name[postgame].achievements] begin[:]
if call[name[name].startswith, parameter[call[name[achievements].player_name.replace, parameter[constant[b'\x00'], constant[b'']]]]] begin[:]
return[name[achievements]]
return[constant[None]] | keyword[def] identifier[get_achievements] ( identifier[self] , identifier[name] ):
literal[string]
identifier[postgame] = identifier[self] . identifier[get_postgame] ()
keyword[if] keyword[not] identifier[postgame] :
keyword[return] keyword[None]
keyword[for] identifier[achievements] keyword[in] identifier[postgame] . identifier[achievements] :
keyword[if] identifier[name] . identifier[startswith] ( identifier[achievements] . identifier[player_name] . identifier[replace] ( literal[string] , literal[string] )):
keyword[return] identifier[achievements]
keyword[return] keyword[None] | def get_achievements(self, name):
"""Get achievements for a player.
Must match on name, not index, since order is not always the same.
"""
postgame = self.get_postgame()
if not postgame:
return None # depends on [control=['if'], data=[]]
for achievements in postgame.achievements:
# achievements player name can be shorter
if name.startswith(achievements.player_name.replace(b'\x00', b'')):
return achievements # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['achievements']]
return None |
def reset_failed_attempts(ip_address=None, username=None):
""" reset the failed attempts for these ip's and usernames
"""
pipe = REDIS_SERVER.pipeline()
unblock_ip(ip_address, pipe=pipe)
unblock_username(username, pipe=pipe)
pipe.execute() | def function[reset_failed_attempts, parameter[ip_address, username]]:
constant[ reset the failed attempts for these ip's and usernames
]
variable[pipe] assign[=] call[name[REDIS_SERVER].pipeline, parameter[]]
call[name[unblock_ip], parameter[name[ip_address]]]
call[name[unblock_username], parameter[name[username]]]
call[name[pipe].execute, parameter[]] | keyword[def] identifier[reset_failed_attempts] ( identifier[ip_address] = keyword[None] , identifier[username] = keyword[None] ):
literal[string]
identifier[pipe] = identifier[REDIS_SERVER] . identifier[pipeline] ()
identifier[unblock_ip] ( identifier[ip_address] , identifier[pipe] = identifier[pipe] )
identifier[unblock_username] ( identifier[username] , identifier[pipe] = identifier[pipe] )
identifier[pipe] . identifier[execute] () | def reset_failed_attempts(ip_address=None, username=None):
""" reset the failed attempts for these ip's and usernames
"""
pipe = REDIS_SERVER.pipeline()
unblock_ip(ip_address, pipe=pipe)
unblock_username(username, pipe=pipe)
pipe.execute() |
def run_datafind_instance(cp, outputDir, connection, observatory, frameType,
startTime, endTime, ifo, tags=None):
"""
This function will query the datafind server once to find frames between
the specified times for the specified frame type and observatory.
Parameters
----------
cp : ConfigParser instance
Source for any kwargs that should be sent to the datafind module
outputDir : Output cache files will be written here. We also write the
commands for reproducing what is done in this function to this
directory.
connection : datafind connection object
Initialized through the glue.datafind module, this is the open
connection to the datafind server.
observatory : string
The observatory to query frames for. Ex. 'H', 'L' or 'V'. NB: not
'H1', 'L1', 'V1' which denote interferometers.
frameType : string
The frame type to query for.
startTime : int
Integer start time to query the datafind server for frames.
endTime : int
Integer end time to query the datafind server for frames.
ifo : string
The interferometer to use for naming output. Ex. 'H1', 'L1', 'V1'.
Maybe this could be merged with the observatory string, but this
could cause issues if running on old 'H2' and 'H1' data.
tags : list of string, optional (default=None)
Use this to specify tags. This can be used if this module is being
called more than once to give call specific configuration (by setting
options in [workflow-datafind-${TAG}] rather than [workflow-datafind]).
This is also used to tag the Files returned by the class to uniqueify
the Files and uniquify the actual filename.
FIXME: Filenames may not be unique with current codes!
Returns
--------
dfCache : glue.lal.Cache instance
The glue.lal.Cache representation of the call to the datafind
server and the returned frame files.
cacheFile : pycbc.workflow.core.File
Cache file listing all of the datafind output files for use later in the pipeline.
"""
if tags is None:
tags = []
seg = segments.segment([startTime, endTime])
# Take the datafind kwargs from config (usually urltype=file is
# given).
dfKwargs = {}
# By default ignore missing frames, this case is dealt with outside of here
dfKwargs['on_gaps'] = 'ignore'
if cp.has_section("datafind"):
for item, value in cp.items("datafind"):
dfKwargs[item] = value
for tag in tags:
if cp.has_section('datafind-%s' %(tag)):
for item, value in cp.items("datafind-%s" %(tag)):
dfKwargs[item] = value
# It is useful to print the corresponding command to the logs
# directory to check if this was expected.
log_datafind_command(observatory, frameType, startTime, endTime,
os.path.join(outputDir,'logs'), **dfKwargs)
logging.debug("Asking datafind server for frames.")
dfCache = connection.find_frame_urls(observatory, frameType,
startTime, endTime, **dfKwargs)
logging.debug("Frames returned")
# workflow format output file
cache_file = File(ifo, 'DATAFIND', seg, extension='lcf',
directory=outputDir, tags=tags)
cache_file.PFN(cache_file.cache_entry.path, site='local')
dfCache.ifo = ifo
# Dump output to file
fP = open(cache_file.storage_path, "w")
# FIXME: CANNOT use dfCache.tofile because it will print 815901601.00000
# as a gps time which is incompatible with the lal cache format
# (and the C codes) which demand an integer.
#dfCache.tofile(fP)
for entry in dfCache:
start = str(int(entry.segment[0]))
duration = str(int(abs(entry.segment)))
print("%s %s %s %s %s" \
% (entry.observatory, entry.description, start, duration, entry.url), file=fP)
entry.segment = segments.segment(int(entry.segment[0]), int(entry.segment[1]))
fP.close()
return dfCache, cache_file | def function[run_datafind_instance, parameter[cp, outputDir, connection, observatory, frameType, startTime, endTime, ifo, tags]]:
constant[
This function will query the datafind server once to find frames between
the specified times for the specified frame type and observatory.
Parameters
----------
cp : ConfigParser instance
Source for any kwargs that should be sent to the datafind module
outputDir : Output cache files will be written here. We also write the
commands for reproducing what is done in this function to this
directory.
connection : datafind connection object
Initialized through the glue.datafind module, this is the open
connection to the datafind server.
observatory : string
The observatory to query frames for. Ex. 'H', 'L' or 'V'. NB: not
'H1', 'L1', 'V1' which denote interferometers.
frameType : string
The frame type to query for.
startTime : int
Integer start time to query the datafind server for frames.
endTime : int
Integer end time to query the datafind server for frames.
ifo : string
The interferometer to use for naming output. Ex. 'H1', 'L1', 'V1'.
Maybe this could be merged with the observatory string, but this
could cause issues if running on old 'H2' and 'H1' data.
tags : list of string, optional (default=None)
Use this to specify tags. This can be used if this module is being
called more than once to give call specific configuration (by setting
options in [workflow-datafind-${TAG}] rather than [workflow-datafind]).
This is also used to tag the Files returned by the class to uniqueify
the Files and uniquify the actual filename.
FIXME: Filenames may not be unique with current codes!
Returns
--------
dfCache : glue.lal.Cache instance
The glue.lal.Cache representation of the call to the datafind
server and the returned frame files.
cacheFile : pycbc.workflow.core.File
Cache file listing all of the datafind output files for use later in the pipeline.
]
if compare[name[tags] is constant[None]] begin[:]
variable[tags] assign[=] list[[]]
variable[seg] assign[=] call[name[segments].segment, parameter[list[[<ast.Name object at 0x7da18f810ac0>, <ast.Name object at 0x7da18f811690>]]]]
variable[dfKwargs] assign[=] dictionary[[], []]
call[name[dfKwargs]][constant[on_gaps]] assign[=] constant[ignore]
if call[name[cp].has_section, parameter[constant[datafind]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18f8128c0>, <ast.Name object at 0x7da18f811750>]]] in starred[call[name[cp].items, parameter[constant[datafind]]]] begin[:]
call[name[dfKwargs]][name[item]] assign[=] name[value]
for taget[name[tag]] in starred[name[tags]] begin[:]
if call[name[cp].has_section, parameter[binary_operation[constant[datafind-%s] <ast.Mod object at 0x7da2590d6920> name[tag]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da2044c1330>, <ast.Name object at 0x7da2044c0220>]]] in starred[call[name[cp].items, parameter[binary_operation[constant[datafind-%s] <ast.Mod object at 0x7da2590d6920> name[tag]]]]] begin[:]
call[name[dfKwargs]][name[item]] assign[=] name[value]
call[name[log_datafind_command], parameter[name[observatory], name[frameType], name[startTime], name[endTime], call[name[os].path.join, parameter[name[outputDir], constant[logs]]]]]
call[name[logging].debug, parameter[constant[Asking datafind server for frames.]]]
variable[dfCache] assign[=] call[name[connection].find_frame_urls, parameter[name[observatory], name[frameType], name[startTime], name[endTime]]]
call[name[logging].debug, parameter[constant[Frames returned]]]
variable[cache_file] assign[=] call[name[File], parameter[name[ifo], constant[DATAFIND], name[seg]]]
call[name[cache_file].PFN, parameter[name[cache_file].cache_entry.path]]
name[dfCache].ifo assign[=] name[ifo]
variable[fP] assign[=] call[name[open], parameter[name[cache_file].storage_path, constant[w]]]
for taget[name[entry]] in starred[name[dfCache]] begin[:]
variable[start] assign[=] call[name[str], parameter[call[name[int], parameter[call[name[entry].segment][constant[0]]]]]]
variable[duration] assign[=] call[name[str], parameter[call[name[int], parameter[call[name[abs], parameter[name[entry].segment]]]]]]
call[name[print], parameter[binary_operation[constant[%s %s %s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b26ac820>, <ast.Attribute object at 0x7da1b26aecb0>, <ast.Name object at 0x7da1b26ace80>, <ast.Name object at 0x7da1b26af2b0>, <ast.Attribute object at 0x7da1b26ac040>]]]]]
name[entry].segment assign[=] call[name[segments].segment, parameter[call[name[int], parameter[call[name[entry].segment][constant[0]]]], call[name[int], parameter[call[name[entry].segment][constant[1]]]]]]
call[name[fP].close, parameter[]]
return[tuple[[<ast.Name object at 0x7da1b26afc70>, <ast.Name object at 0x7da1b26ad120>]]] | keyword[def] identifier[run_datafind_instance] ( identifier[cp] , identifier[outputDir] , identifier[connection] , identifier[observatory] , identifier[frameType] ,
identifier[startTime] , identifier[endTime] , identifier[ifo] , identifier[tags] = keyword[None] ):
literal[string]
keyword[if] identifier[tags] keyword[is] keyword[None] :
identifier[tags] =[]
identifier[seg] = identifier[segments] . identifier[segment] ([ identifier[startTime] , identifier[endTime] ])
identifier[dfKwargs] ={}
identifier[dfKwargs] [ literal[string] ]= literal[string]
keyword[if] identifier[cp] . identifier[has_section] ( literal[string] ):
keyword[for] identifier[item] , identifier[value] keyword[in] identifier[cp] . identifier[items] ( literal[string] ):
identifier[dfKwargs] [ identifier[item] ]= identifier[value]
keyword[for] identifier[tag] keyword[in] identifier[tags] :
keyword[if] identifier[cp] . identifier[has_section] ( literal[string] %( identifier[tag] )):
keyword[for] identifier[item] , identifier[value] keyword[in] identifier[cp] . identifier[items] ( literal[string] %( identifier[tag] )):
identifier[dfKwargs] [ identifier[item] ]= identifier[value]
identifier[log_datafind_command] ( identifier[observatory] , identifier[frameType] , identifier[startTime] , identifier[endTime] ,
identifier[os] . identifier[path] . identifier[join] ( identifier[outputDir] , literal[string] ),** identifier[dfKwargs] )
identifier[logging] . identifier[debug] ( literal[string] )
identifier[dfCache] = identifier[connection] . identifier[find_frame_urls] ( identifier[observatory] , identifier[frameType] ,
identifier[startTime] , identifier[endTime] ,** identifier[dfKwargs] )
identifier[logging] . identifier[debug] ( literal[string] )
identifier[cache_file] = identifier[File] ( identifier[ifo] , literal[string] , identifier[seg] , identifier[extension] = literal[string] ,
identifier[directory] = identifier[outputDir] , identifier[tags] = identifier[tags] )
identifier[cache_file] . identifier[PFN] ( identifier[cache_file] . identifier[cache_entry] . identifier[path] , identifier[site] = literal[string] )
identifier[dfCache] . identifier[ifo] = identifier[ifo]
identifier[fP] = identifier[open] ( identifier[cache_file] . identifier[storage_path] , literal[string] )
keyword[for] identifier[entry] keyword[in] identifier[dfCache] :
identifier[start] = identifier[str] ( identifier[int] ( identifier[entry] . identifier[segment] [ literal[int] ]))
identifier[duration] = identifier[str] ( identifier[int] ( identifier[abs] ( identifier[entry] . identifier[segment] )))
identifier[print] ( literal[string] %( identifier[entry] . identifier[observatory] , identifier[entry] . identifier[description] , identifier[start] , identifier[duration] , identifier[entry] . identifier[url] ), identifier[file] = identifier[fP] )
identifier[entry] . identifier[segment] = identifier[segments] . identifier[segment] ( identifier[int] ( identifier[entry] . identifier[segment] [ literal[int] ]), identifier[int] ( identifier[entry] . identifier[segment] [ literal[int] ]))
identifier[fP] . identifier[close] ()
keyword[return] identifier[dfCache] , identifier[cache_file] | def run_datafind_instance(cp, outputDir, connection, observatory, frameType, startTime, endTime, ifo, tags=None):
"""
This function will query the datafind server once to find frames between
the specified times for the specified frame type and observatory.
Parameters
----------
cp : ConfigParser instance
Source for any kwargs that should be sent to the datafind module
outputDir : Output cache files will be written here. We also write the
commands for reproducing what is done in this function to this
directory.
connection : datafind connection object
Initialized through the glue.datafind module, this is the open
connection to the datafind server.
observatory : string
The observatory to query frames for. Ex. 'H', 'L' or 'V'. NB: not
'H1', 'L1', 'V1' which denote interferometers.
frameType : string
The frame type to query for.
startTime : int
Integer start time to query the datafind server for frames.
endTime : int
Integer end time to query the datafind server for frames.
ifo : string
The interferometer to use for naming output. Ex. 'H1', 'L1', 'V1'.
Maybe this could be merged with the observatory string, but this
could cause issues if running on old 'H2' and 'H1' data.
tags : list of string, optional (default=None)
Use this to specify tags. This can be used if this module is being
called more than once to give call specific configuration (by setting
options in [workflow-datafind-${TAG}] rather than [workflow-datafind]).
This is also used to tag the Files returned by the class to uniqueify
the Files and uniquify the actual filename.
FIXME: Filenames may not be unique with current codes!
Returns
--------
dfCache : glue.lal.Cache instance
The glue.lal.Cache representation of the call to the datafind
server and the returned frame files.
cacheFile : pycbc.workflow.core.File
Cache file listing all of the datafind output files for use later in the pipeline.
"""
if tags is None:
tags = [] # depends on [control=['if'], data=['tags']]
seg = segments.segment([startTime, endTime])
# Take the datafind kwargs from config (usually urltype=file is
# given).
dfKwargs = {}
# By default ignore missing frames, this case is dealt with outside of here
dfKwargs['on_gaps'] = 'ignore'
if cp.has_section('datafind'):
for (item, value) in cp.items('datafind'):
dfKwargs[item] = value # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
for tag in tags:
if cp.has_section('datafind-%s' % tag):
for (item, value) in cp.items('datafind-%s' % tag):
dfKwargs[item] = value # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tag']]
# It is useful to print the corresponding command to the logs
# directory to check if this was expected.
log_datafind_command(observatory, frameType, startTime, endTime, os.path.join(outputDir, 'logs'), **dfKwargs)
logging.debug('Asking datafind server for frames.')
dfCache = connection.find_frame_urls(observatory, frameType, startTime, endTime, **dfKwargs)
logging.debug('Frames returned')
# workflow format output file
cache_file = File(ifo, 'DATAFIND', seg, extension='lcf', directory=outputDir, tags=tags)
cache_file.PFN(cache_file.cache_entry.path, site='local')
dfCache.ifo = ifo
# Dump output to file
fP = open(cache_file.storage_path, 'w')
# FIXME: CANNOT use dfCache.tofile because it will print 815901601.00000
# as a gps time which is incompatible with the lal cache format
# (and the C codes) which demand an integer.
#dfCache.tofile(fP)
for entry in dfCache:
start = str(int(entry.segment[0]))
duration = str(int(abs(entry.segment)))
print('%s %s %s %s %s' % (entry.observatory, entry.description, start, duration, entry.url), file=fP)
entry.segment = segments.segment(int(entry.segment[0]), int(entry.segment[1])) # depends on [control=['for'], data=['entry']]
fP.close()
return (dfCache, cache_file) |
def re_parser(self, scode, *args):
"""
args: [arg1, arg2]
arg[0] = a valid regex pattern
arg[1] : if startswith('@') call sub; if startswith('$') call finditer,
$0, $1 means group index.
return an ensure_list
"""
def gen_match(matches, num):
for match in matches:
yield match.group(num)
scode = self.ensure_str(scode)
assert self._re.match(
'^@|^\$\d+', args[1]), ValueError('args1 should match ^@|^\$\d+')
arg1, arg2 = args[1][0], args[1][1:]
com = self._re.compile(args[0])
if arg1 == '@':
result = com.sub(arg2, scode)
return self.ensure_list(result)
else:
result = com.finditer(scode)
return gen_match(result, int(arg2)) | def function[re_parser, parameter[self, scode]]:
constant[
args: [arg1, arg2]
arg[0] = a valid regex pattern
arg[1] : if startswith('@') call sub; if startswith('$') call finditer,
$0, $1 means group index.
return an ensure_list
]
def function[gen_match, parameter[matches, num]]:
for taget[name[match]] in starred[name[matches]] begin[:]
<ast.Yield object at 0x7da20e9b0640>
variable[scode] assign[=] call[name[self].ensure_str, parameter[name[scode]]]
assert[call[name[self]._re.match, parameter[constant[^@|^\$\d+], call[name[args]][constant[1]]]]]
<ast.Tuple object at 0x7da20e9b30a0> assign[=] tuple[[<ast.Subscript object at 0x7da20e9b0df0>, <ast.Subscript object at 0x7da20e9b0e50>]]
variable[com] assign[=] call[name[self]._re.compile, parameter[call[name[args]][constant[0]]]]
if compare[name[arg1] equal[==] constant[@]] begin[:]
variable[result] assign[=] call[name[com].sub, parameter[name[arg2], name[scode]]]
return[call[name[self].ensure_list, parameter[name[result]]]] | keyword[def] identifier[re_parser] ( identifier[self] , identifier[scode] ,* identifier[args] ):
literal[string]
keyword[def] identifier[gen_match] ( identifier[matches] , identifier[num] ):
keyword[for] identifier[match] keyword[in] identifier[matches] :
keyword[yield] identifier[match] . identifier[group] ( identifier[num] )
identifier[scode] = identifier[self] . identifier[ensure_str] ( identifier[scode] )
keyword[assert] identifier[self] . identifier[_re] . identifier[match] (
literal[string] , identifier[args] [ literal[int] ]), identifier[ValueError] ( literal[string] )
identifier[arg1] , identifier[arg2] = identifier[args] [ literal[int] ][ literal[int] ], identifier[args] [ literal[int] ][ literal[int] :]
identifier[com] = identifier[self] . identifier[_re] . identifier[compile] ( identifier[args] [ literal[int] ])
keyword[if] identifier[arg1] == literal[string] :
identifier[result] = identifier[com] . identifier[sub] ( identifier[arg2] , identifier[scode] )
keyword[return] identifier[self] . identifier[ensure_list] ( identifier[result] )
keyword[else] :
identifier[result] = identifier[com] . identifier[finditer] ( identifier[scode] )
keyword[return] identifier[gen_match] ( identifier[result] , identifier[int] ( identifier[arg2] )) | def re_parser(self, scode, *args):
"""
args: [arg1, arg2]
arg[0] = a valid regex pattern
arg[1] : if startswith('@') call sub; if startswith('$') call finditer,
$0, $1 means group index.
return an ensure_list
"""
def gen_match(matches, num):
for match in matches:
yield match.group(num) # depends on [control=['for'], data=['match']]
scode = self.ensure_str(scode)
assert self._re.match('^@|^\\$\\d+', args[1]), ValueError('args1 should match ^@|^\\$\\d+')
(arg1, arg2) = (args[1][0], args[1][1:])
com = self._re.compile(args[0])
if arg1 == '@':
result = com.sub(arg2, scode)
return self.ensure_list(result) # depends on [control=['if'], data=[]]
else:
result = com.finditer(scode)
return gen_match(result, int(arg2)) |
def _checkIdEquality(self, requestedEffect, effect):
"""
Tests whether a requested effect and an effect
present in an annotation are equal.
"""
return self._idPresent(requestedEffect) and (
effect.term_id == requestedEffect.term_id) | def function[_checkIdEquality, parameter[self, requestedEffect, effect]]:
constant[
Tests whether a requested effect and an effect
present in an annotation are equal.
]
return[<ast.BoolOp object at 0x7da18f812830>] | keyword[def] identifier[_checkIdEquality] ( identifier[self] , identifier[requestedEffect] , identifier[effect] ):
literal[string]
keyword[return] identifier[self] . identifier[_idPresent] ( identifier[requestedEffect] ) keyword[and] (
identifier[effect] . identifier[term_id] == identifier[requestedEffect] . identifier[term_id] ) | def _checkIdEquality(self, requestedEffect, effect):
"""
Tests whether a requested effect and an effect
present in an annotation are equal.
"""
return self._idPresent(requestedEffect) and effect.term_id == requestedEffect.term_id |
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass | def function[python_2_unicode_compatible, parameter[klass]]:
constant[
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
]
if name[PY2] begin[:]
if compare[constant[__str__] <ast.NotIn object at 0x7da2590d7190> name[klass].__dict__] begin[:]
<ast.Raise object at 0x7da18bcc90f0>
name[klass].__unicode__ assign[=] name[klass].__str__
name[klass].__str__ assign[=] <ast.Lambda object at 0x7da18bcc99f0>
return[name[klass]] | keyword[def] identifier[python_2_unicode_compatible] ( identifier[klass] ):
literal[string]
keyword[if] identifier[PY2] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[klass] . identifier[__dict__] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] %
identifier[klass] . identifier[__name__] )
identifier[klass] . identifier[__unicode__] = identifier[klass] . identifier[__str__]
identifier[klass] . identifier[__str__] = keyword[lambda] identifier[self] : identifier[self] . identifier[__unicode__] (). identifier[encode] ( literal[string] )
keyword[return] identifier[klass] | def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied to %s because it doesn't define __str__()." % klass.__name__) # depends on [control=['if'], data=[]]
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8') # depends on [control=['if'], data=[]]
return klass |
def block_lengths(self):
"""Gets the lengths of the blocks.
Note: This works with the property structure `_lengths_cache` to avoid
having to recompute these values each time they are needed.
"""
if self._lengths_cache is None:
# The first column will have the correct lengths. We have an
# invariant that requires that all blocks be the same length in a
# row of blocks.
self._lengths_cache = (
[obj.length() for obj in self._partitions_cache.T[0]]
if len(self._partitions_cache.T) > 0
else []
)
return self._lengths_cache | def function[block_lengths, parameter[self]]:
constant[Gets the lengths of the blocks.
Note: This works with the property structure `_lengths_cache` to avoid
having to recompute these values each time they are needed.
]
if compare[name[self]._lengths_cache is constant[None]] begin[:]
name[self]._lengths_cache assign[=] <ast.IfExp object at 0x7da20c76ecb0>
return[name[self]._lengths_cache] | keyword[def] identifier[block_lengths] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_lengths_cache] keyword[is] keyword[None] :
identifier[self] . identifier[_lengths_cache] =(
[ identifier[obj] . identifier[length] () keyword[for] identifier[obj] keyword[in] identifier[self] . identifier[_partitions_cache] . identifier[T] [ literal[int] ]]
keyword[if] identifier[len] ( identifier[self] . identifier[_partitions_cache] . identifier[T] )> literal[int]
keyword[else] []
)
keyword[return] identifier[self] . identifier[_lengths_cache] | def block_lengths(self):
"""Gets the lengths of the blocks.
Note: This works with the property structure `_lengths_cache` to avoid
having to recompute these values each time they are needed.
"""
if self._lengths_cache is None:
# The first column will have the correct lengths. We have an
# invariant that requires that all blocks be the same length in a
# row of blocks.
self._lengths_cache = [obj.length() for obj in self._partitions_cache.T[0]] if len(self._partitions_cache.T) > 0 else [] # depends on [control=['if'], data=[]]
return self._lengths_cache |
def get_job_logs(id):
"""Get the crawl logs from the job."""
crawler_job = models.CrawlerJob.query.filter_by(id=id).one_or_none()
if crawler_job is None:
click.secho(
(
"CrawlJob %s was not found, maybe it's not a crawl job?" %
id
),
fg='yellow',
)
sys.exit(1)
if crawler_job.logs is None:
click.secho(
(
"CrawlJob %s has no log, it might be that it has not run "
"yet, you can try again later." %
id
),
fg='yellow',
)
sys.exit(1)
_show_file(
file_path=crawler_job.logs,
header_name='Log',
) | def function[get_job_logs, parameter[id]]:
constant[Get the crawl logs from the job.]
variable[crawler_job] assign[=] call[call[name[models].CrawlerJob.query.filter_by, parameter[]].one_or_none, parameter[]]
if compare[name[crawler_job] is constant[None]] begin[:]
call[name[click].secho, parameter[binary_operation[constant[CrawlJob %s was not found, maybe it's not a crawl job?] <ast.Mod object at 0x7da2590d6920> name[id]]]]
call[name[sys].exit, parameter[constant[1]]]
if compare[name[crawler_job].logs is constant[None]] begin[:]
call[name[click].secho, parameter[binary_operation[constant[CrawlJob %s has no log, it might be that it has not run yet, you can try again later.] <ast.Mod object at 0x7da2590d6920> name[id]]]]
call[name[sys].exit, parameter[constant[1]]]
call[name[_show_file], parameter[]] | keyword[def] identifier[get_job_logs] ( identifier[id] ):
literal[string]
identifier[crawler_job] = identifier[models] . identifier[CrawlerJob] . identifier[query] . identifier[filter_by] ( identifier[id] = identifier[id] ). identifier[one_or_none] ()
keyword[if] identifier[crawler_job] keyword[is] keyword[None] :
identifier[click] . identifier[secho] (
(
literal[string] %
identifier[id]
),
identifier[fg] = literal[string] ,
)
identifier[sys] . identifier[exit] ( literal[int] )
keyword[if] identifier[crawler_job] . identifier[logs] keyword[is] keyword[None] :
identifier[click] . identifier[secho] (
(
literal[string]
literal[string] %
identifier[id]
),
identifier[fg] = literal[string] ,
)
identifier[sys] . identifier[exit] ( literal[int] )
identifier[_show_file] (
identifier[file_path] = identifier[crawler_job] . identifier[logs] ,
identifier[header_name] = literal[string] ,
) | def get_job_logs(id):
"""Get the crawl logs from the job."""
crawler_job = models.CrawlerJob.query.filter_by(id=id).one_or_none()
if crawler_job is None:
click.secho("CrawlJob %s was not found, maybe it's not a crawl job?" % id, fg='yellow')
sys.exit(1) # depends on [control=['if'], data=[]]
if crawler_job.logs is None:
click.secho('CrawlJob %s has no log, it might be that it has not run yet, you can try again later.' % id, fg='yellow')
sys.exit(1) # depends on [control=['if'], data=[]]
_show_file(file_path=crawler_job.logs, header_name='Log') |
def update_q(self, state_key, action_key, reward_value, next_max_q):
'''
Update Q-Value.
Args:
state_key: The key of state.
action_key: The key of action.
reward_value: R-Value(Reward).
next_max_q: Maximum Q-Value.
'''
# Now Q-Value.
q = self.extract_q_df(state_key, action_key)
# Update Q-Value.
new_q = q + self.alpha_value * (reward_value + (self.gamma_value * next_max_q) - q)
# Save updated Q-Value.
self.save_q_df(state_key, action_key, new_q) | def function[update_q, parameter[self, state_key, action_key, reward_value, next_max_q]]:
constant[
Update Q-Value.
Args:
state_key: The key of state.
action_key: The key of action.
reward_value: R-Value(Reward).
next_max_q: Maximum Q-Value.
]
variable[q] assign[=] call[name[self].extract_q_df, parameter[name[state_key], name[action_key]]]
variable[new_q] assign[=] binary_operation[name[q] + binary_operation[name[self].alpha_value * binary_operation[binary_operation[name[reward_value] + binary_operation[name[self].gamma_value * name[next_max_q]]] - name[q]]]]
call[name[self].save_q_df, parameter[name[state_key], name[action_key], name[new_q]]] | keyword[def] identifier[update_q] ( identifier[self] , identifier[state_key] , identifier[action_key] , identifier[reward_value] , identifier[next_max_q] ):
literal[string]
identifier[q] = identifier[self] . identifier[extract_q_df] ( identifier[state_key] , identifier[action_key] )
identifier[new_q] = identifier[q] + identifier[self] . identifier[alpha_value] *( identifier[reward_value] +( identifier[self] . identifier[gamma_value] * identifier[next_max_q] )- identifier[q] )
identifier[self] . identifier[save_q_df] ( identifier[state_key] , identifier[action_key] , identifier[new_q] ) | def update_q(self, state_key, action_key, reward_value, next_max_q):
"""
Update Q-Value.
Args:
state_key: The key of state.
action_key: The key of action.
reward_value: R-Value(Reward).
next_max_q: Maximum Q-Value.
"""
# Now Q-Value.
q = self.extract_q_df(state_key, action_key)
# Update Q-Value.
new_q = q + self.alpha_value * (reward_value + self.gamma_value * next_max_q - q)
# Save updated Q-Value.
self.save_q_df(state_key, action_key, new_q) |
def add_sample(self, name, labels, value, timestamp=None, exemplar=None):
"""Add a sample to the metric.
Internal-only, do not use."""
self.samples.append(Sample(name, labels, value, timestamp, exemplar)) | def function[add_sample, parameter[self, name, labels, value, timestamp, exemplar]]:
constant[Add a sample to the metric.
Internal-only, do not use.]
call[name[self].samples.append, parameter[call[name[Sample], parameter[name[name], name[labels], name[value], name[timestamp], name[exemplar]]]]] | keyword[def] identifier[add_sample] ( identifier[self] , identifier[name] , identifier[labels] , identifier[value] , identifier[timestamp] = keyword[None] , identifier[exemplar] = keyword[None] ):
literal[string]
identifier[self] . identifier[samples] . identifier[append] ( identifier[Sample] ( identifier[name] , identifier[labels] , identifier[value] , identifier[timestamp] , identifier[exemplar] )) | def add_sample(self, name, labels, value, timestamp=None, exemplar=None):
"""Add a sample to the metric.
Internal-only, do not use."""
self.samples.append(Sample(name, labels, value, timestamp, exemplar)) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.