code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def round_to_next(x, base):
"""Round float to next multiple of base."""
# Based on: http://stackoverflow.com/a/2272174
return int(base * math.ceil(float(x)/base)) | def function[round_to_next, parameter[x, base]]:
constant[Round float to next multiple of base.]
return[call[name[int], parameter[binary_operation[name[base] * call[name[math].ceil, parameter[binary_operation[call[name[float], parameter[name[x]]] / name[base]]]]]]]] | keyword[def] identifier[round_to_next] ( identifier[x] , identifier[base] ):
literal[string]
keyword[return] identifier[int] ( identifier[base] * identifier[math] . identifier[ceil] ( identifier[float] ( identifier[x] )/ identifier[base] )) | def round_to_next(x, base):
"""Round float to next multiple of base."""
# Based on: http://stackoverflow.com/a/2272174
return int(base * math.ceil(float(x) / base)) |
def init_process(self) -> None:
"""
GunicornWorker 初始化回调
"""
default_loop = asyncio.get_event_loop()
if default_loop.is_running():
default_loop.close()
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
else:
self.loop = default_loop
super().init_process() | def function[init_process, parameter[self]]:
constant[
GunicornWorker 初始化回调
]
variable[default_loop] assign[=] call[name[asyncio].get_event_loop, parameter[]]
if call[name[default_loop].is_running, parameter[]] begin[:]
call[name[default_loop].close, parameter[]]
name[self].loop assign[=] call[name[asyncio].new_event_loop, parameter[]]
call[name[asyncio].set_event_loop, parameter[name[self].loop]]
call[call[name[super], parameter[]].init_process, parameter[]] | keyword[def] identifier[init_process] ( identifier[self] )-> keyword[None] :
literal[string]
identifier[default_loop] = identifier[asyncio] . identifier[get_event_loop] ()
keyword[if] identifier[default_loop] . identifier[is_running] ():
identifier[default_loop] . identifier[close] ()
identifier[self] . identifier[loop] = identifier[asyncio] . identifier[new_event_loop] ()
identifier[asyncio] . identifier[set_event_loop] ( identifier[self] . identifier[loop] )
keyword[else] :
identifier[self] . identifier[loop] = identifier[default_loop]
identifier[super] (). identifier[init_process] () | def init_process(self) -> None:
"""
GunicornWorker 初始化回调
"""
default_loop = asyncio.get_event_loop()
if default_loop.is_running():
default_loop.close()
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop) # depends on [control=['if'], data=[]]
else:
self.loop = default_loop
super().init_process() |
def roi_align(featuremap, boxes, resolution):
"""
Args:
featuremap: 1xCxHxW
boxes: Nx4 floatbox
resolution: output spatial resolution
Returns:
NxCx res x res
"""
# sample 4 locations per roi bin
ret = crop_and_resize(
featuremap, boxes,
tf.zeros([tf.shape(boxes)[0]], dtype=tf.int32),
resolution * 2)
ret = tf.nn.avg_pool(ret, [1, 1, 2, 2], [1, 1, 2, 2], padding='SAME', data_format='NCHW')
return ret | def function[roi_align, parameter[featuremap, boxes, resolution]]:
constant[
Args:
featuremap: 1xCxHxW
boxes: Nx4 floatbox
resolution: output spatial resolution
Returns:
NxCx res x res
]
variable[ret] assign[=] call[name[crop_and_resize], parameter[name[featuremap], name[boxes], call[name[tf].zeros, parameter[list[[<ast.Subscript object at 0x7da18bccbfd0>]]]], binary_operation[name[resolution] * constant[2]]]]
variable[ret] assign[=] call[name[tf].nn.avg_pool, parameter[name[ret], list[[<ast.Constant object at 0x7da18bccb850>, <ast.Constant object at 0x7da18bcca7a0>, <ast.Constant object at 0x7da18bccba60>, <ast.Constant object at 0x7da18bccb640>]], list[[<ast.Constant object at 0x7da18bccb4f0>, <ast.Constant object at 0x7da18bcc8eb0>, <ast.Constant object at 0x7da18bccb220>, <ast.Constant object at 0x7da18bccb730>]]]]
return[name[ret]] | keyword[def] identifier[roi_align] ( identifier[featuremap] , identifier[boxes] , identifier[resolution] ):
literal[string]
identifier[ret] = identifier[crop_and_resize] (
identifier[featuremap] , identifier[boxes] ,
identifier[tf] . identifier[zeros] ([ identifier[tf] . identifier[shape] ( identifier[boxes] )[ literal[int] ]], identifier[dtype] = identifier[tf] . identifier[int32] ),
identifier[resolution] * literal[int] )
identifier[ret] = identifier[tf] . identifier[nn] . identifier[avg_pool] ( identifier[ret] ,[ literal[int] , literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] , literal[int] ], identifier[padding] = literal[string] , identifier[data_format] = literal[string] )
keyword[return] identifier[ret] | def roi_align(featuremap, boxes, resolution):
"""
Args:
featuremap: 1xCxHxW
boxes: Nx4 floatbox
resolution: output spatial resolution
Returns:
NxCx res x res
"""
# sample 4 locations per roi bin
ret = crop_and_resize(featuremap, boxes, tf.zeros([tf.shape(boxes)[0]], dtype=tf.int32), resolution * 2)
ret = tf.nn.avg_pool(ret, [1, 1, 2, 2], [1, 1, 2, 2], padding='SAME', data_format='NCHW')
return ret |
def paginator(context, adjacent_pages=2):
"""
To be used in conjunction with the object_list generic view.
Adds pagination context variables for use in displaying first, adjacent and
last page links in addition to those created by the object_list generic view.
"""
current_page = context.get('page')
paginator = context.get('paginator')
if not paginator:
return
pages = paginator.num_pages
current_range = range(current_page - adjacent_pages, current_page + adjacent_pages + 1)
page_numbers = [n for n in current_range if n > 0 and n <= pages]
slugtype = ''
if 'topic_slug' in context:
page_url = context["topic"].get_short_url()
slugtype = 'topic'
elif 'forum_slug' in context:
page_url = '/forum/%s/' % context["forum_slug"]
slugtype = 'forum'
else:
page_url = context['request'].get_full_path()
return {
"is_paginated": context["is_paginated"],
"page": current_page,
"pages": pages,
"page_obj": context['page_obj'],
"page_numbers": page_numbers,
"has_next": context["page_obj"].has_next(),
"has_previous": context["page_obj"].has_previous(),
"page_url" : page_url,
'slugtype' : slugtype,
} | def function[paginator, parameter[context, adjacent_pages]]:
constant[
To be used in conjunction with the object_list generic view.
Adds pagination context variables for use in displaying first, adjacent and
last page links in addition to those created by the object_list generic view.
]
variable[current_page] assign[=] call[name[context].get, parameter[constant[page]]]
variable[paginator] assign[=] call[name[context].get, parameter[constant[paginator]]]
if <ast.UnaryOp object at 0x7da1b1443a00> begin[:]
return[None]
variable[pages] assign[=] name[paginator].num_pages
variable[current_range] assign[=] call[name[range], parameter[binary_operation[name[current_page] - name[adjacent_pages]], binary_operation[binary_operation[name[current_page] + name[adjacent_pages]] + constant[1]]]]
variable[page_numbers] assign[=] <ast.ListComp object at 0x7da1b14e5d80>
variable[slugtype] assign[=] constant[]
if compare[constant[topic_slug] in name[context]] begin[:]
variable[page_url] assign[=] call[call[name[context]][constant[topic]].get_short_url, parameter[]]
variable[slugtype] assign[=] constant[topic]
return[dictionary[[<ast.Constant object at 0x7da1b14c57b0>, <ast.Constant object at 0x7da1b14c6620>, <ast.Constant object at 0x7da1b14c4760>, <ast.Constant object at 0x7da1b14c58a0>, <ast.Constant object at 0x7da1b14c4610>, <ast.Constant object at 0x7da1b14c4a00>, <ast.Constant object at 0x7da1b14c62c0>, <ast.Constant object at 0x7da1b14c7070>, <ast.Constant object at 0x7da1b14c6860>], [<ast.Subscript object at 0x7da1b14c4c70>, <ast.Name object at 0x7da1b157a3e0>, <ast.Name object at 0x7da1b157a9e0>, <ast.Subscript object at 0x7da1b157b730>, <ast.Name object at 0x7da1b157a1d0>, <ast.Call object at 0x7da1b157b670>, <ast.Call object at 0x7da1b157bca0>, <ast.Name object at 0x7da1b1579d50>, <ast.Name object at 0x7da1b157a6b0>]]] | keyword[def] identifier[paginator] ( identifier[context] , identifier[adjacent_pages] = literal[int] ):
literal[string]
identifier[current_page] = identifier[context] . identifier[get] ( literal[string] )
identifier[paginator] = identifier[context] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[paginator] :
keyword[return]
identifier[pages] = identifier[paginator] . identifier[num_pages]
identifier[current_range] = identifier[range] ( identifier[current_page] - identifier[adjacent_pages] , identifier[current_page] + identifier[adjacent_pages] + literal[int] )
identifier[page_numbers] =[ identifier[n] keyword[for] identifier[n] keyword[in] identifier[current_range] keyword[if] identifier[n] > literal[int] keyword[and] identifier[n] <= identifier[pages] ]
identifier[slugtype] = literal[string]
keyword[if] literal[string] keyword[in] identifier[context] :
identifier[page_url] = identifier[context] [ literal[string] ]. identifier[get_short_url] ()
identifier[slugtype] = literal[string]
keyword[elif] literal[string] keyword[in] identifier[context] :
identifier[page_url] = literal[string] % identifier[context] [ literal[string] ]
identifier[slugtype] = literal[string]
keyword[else] :
identifier[page_url] = identifier[context] [ literal[string] ]. identifier[get_full_path] ()
keyword[return] {
literal[string] : identifier[context] [ literal[string] ],
literal[string] : identifier[current_page] ,
literal[string] : identifier[pages] ,
literal[string] : identifier[context] [ literal[string] ],
literal[string] : identifier[page_numbers] ,
literal[string] : identifier[context] [ literal[string] ]. identifier[has_next] (),
literal[string] : identifier[context] [ literal[string] ]. identifier[has_previous] (),
literal[string] : identifier[page_url] ,
literal[string] : identifier[slugtype] ,
} | def paginator(context, adjacent_pages=2):
"""
To be used in conjunction with the object_list generic view.
Adds pagination context variables for use in displaying first, adjacent and
last page links in addition to those created by the object_list generic view.
"""
current_page = context.get('page')
paginator = context.get('paginator')
if not paginator:
return # depends on [control=['if'], data=[]]
pages = paginator.num_pages
current_range = range(current_page - adjacent_pages, current_page + adjacent_pages + 1)
page_numbers = [n for n in current_range if n > 0 and n <= pages]
slugtype = ''
if 'topic_slug' in context:
page_url = context['topic'].get_short_url()
slugtype = 'topic' # depends on [control=['if'], data=['context']]
elif 'forum_slug' in context:
page_url = '/forum/%s/' % context['forum_slug']
slugtype = 'forum' # depends on [control=['if'], data=['context']]
else:
page_url = context['request'].get_full_path()
return {'is_paginated': context['is_paginated'], 'page': current_page, 'pages': pages, 'page_obj': context['page_obj'], 'page_numbers': page_numbers, 'has_next': context['page_obj'].has_next(), 'has_previous': context['page_obj'].has_previous(), 'page_url': page_url, 'slugtype': slugtype} |
def prep_directory(self, target_dir):
"""
Prepares a new directory to store the file at the provided path, if needed.
"""
dirname = path.dirname(target_dir)
if dirname:
dirname = path.join(settings.BUILD_DIR, dirname)
if not self.fs.exists(dirname):
logger.debug("Creating directory at {}{}".format(self.fs_name, dirname))
self.fs.makedirs(dirname) | def function[prep_directory, parameter[self, target_dir]]:
constant[
Prepares a new directory to store the file at the provided path, if needed.
]
variable[dirname] assign[=] call[name[path].dirname, parameter[name[target_dir]]]
if name[dirname] begin[:]
variable[dirname] assign[=] call[name[path].join, parameter[name[settings].BUILD_DIR, name[dirname]]]
if <ast.UnaryOp object at 0x7da204566140> begin[:]
call[name[logger].debug, parameter[call[constant[Creating directory at {}{}].format, parameter[name[self].fs_name, name[dirname]]]]]
call[name[self].fs.makedirs, parameter[name[dirname]]] | keyword[def] identifier[prep_directory] ( identifier[self] , identifier[target_dir] ):
literal[string]
identifier[dirname] = identifier[path] . identifier[dirname] ( identifier[target_dir] )
keyword[if] identifier[dirname] :
identifier[dirname] = identifier[path] . identifier[join] ( identifier[settings] . identifier[BUILD_DIR] , identifier[dirname] )
keyword[if] keyword[not] identifier[self] . identifier[fs] . identifier[exists] ( identifier[dirname] ):
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[fs_name] , identifier[dirname] ))
identifier[self] . identifier[fs] . identifier[makedirs] ( identifier[dirname] ) | def prep_directory(self, target_dir):
"""
Prepares a new directory to store the file at the provided path, if needed.
"""
dirname = path.dirname(target_dir)
if dirname:
dirname = path.join(settings.BUILD_DIR, dirname)
if not self.fs.exists(dirname):
logger.debug('Creating directory at {}{}'.format(self.fs_name, dirname))
self.fs.makedirs(dirname) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def reg_concrete(self, *args, **kwargs):
"""
Returns the contents of a register but, if that register is symbolic,
raises a SimValueError.
"""
e = self.registers.load(*args, **kwargs)
if self.solver.symbolic(e):
raise SimValueError("target of reg_concrete is symbolic!")
return self.solver.eval(e) | def function[reg_concrete, parameter[self]]:
constant[
Returns the contents of a register but, if that register is symbolic,
raises a SimValueError.
]
variable[e] assign[=] call[name[self].registers.load, parameter[<ast.Starred object at 0x7da1b21e3ee0>]]
if call[name[self].solver.symbolic, parameter[name[e]]] begin[:]
<ast.Raise object at 0x7da20c6c7880>
return[call[name[self].solver.eval, parameter[name[e]]]] | keyword[def] identifier[reg_concrete] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[e] = identifier[self] . identifier[registers] . identifier[load] (* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[self] . identifier[solver] . identifier[symbolic] ( identifier[e] ):
keyword[raise] identifier[SimValueError] ( literal[string] )
keyword[return] identifier[self] . identifier[solver] . identifier[eval] ( identifier[e] ) | def reg_concrete(self, *args, **kwargs):
"""
Returns the contents of a register but, if that register is symbolic,
raises a SimValueError.
"""
e = self.registers.load(*args, **kwargs)
if self.solver.symbolic(e):
raise SimValueError('target of reg_concrete is symbolic!') # depends on [control=['if'], data=[]]
return self.solver.eval(e) |
async def main(interface=None):
""" Main function """
qtm_ip = await choose_qtm_instance(interface)
if qtm_ip is None:
return
while True:
connection = await qtm.connect(qtm_ip, 22223, version="1.18")
if connection is None:
return
await connection.get_state()
await connection.byte_order()
async with qtm.TakeControl(connection, "password"):
result = await connection.close()
if result == b"Closing connection":
await connection.await_event(qtm.QRTEvent.EventConnectionClosed)
await connection.load(QTM_FILE)
await connection.start(rtfromfile=True)
(await connection.get_current_frame()).get_3d_markers()
queue = asyncio.Queue()
asyncio.ensure_future(packet_receiver(queue))
try:
await connection.stream_frames(
components=["incorrect"], on_packet=queue.put_nowait
)
except qtm.QRTCommandException as exception:
LOG.info("exception %s", exception)
await connection.stream_frames(
components=["3d"], on_packet=queue.put_nowait
)
await asyncio.sleep(0.5)
await connection.byte_order()
await asyncio.sleep(0.5)
await connection.stream_frames_stop()
queue.put_nowait(None)
await connection.get_parameters(parameters=["3d"])
await connection.stop()
await connection.await_event()
await connection.new()
await connection.await_event(qtm.QRTEvent.EventConnected)
await connection.start()
await connection.await_event(qtm.QRTEvent.EventWaitingForTrigger)
await connection.trig()
await connection.await_event(qtm.QRTEvent.EventCaptureStarted)
await asyncio.sleep(0.5)
await connection.set_qtm_event()
await asyncio.sleep(0.001)
await connection.set_qtm_event("with_label")
await asyncio.sleep(0.5)
await connection.stop()
await connection.await_event(qtm.QRTEvent.EventCaptureStopped)
await connection.save(r"measurement.qtm")
await asyncio.sleep(3)
await connection.close()
connection.disconnect() | <ast.AsyncFunctionDef object at 0x7da1affe65f0> | keyword[async] keyword[def] identifier[main] ( identifier[interface] = keyword[None] ):
literal[string]
identifier[qtm_ip] = keyword[await] identifier[choose_qtm_instance] ( identifier[interface] )
keyword[if] identifier[qtm_ip] keyword[is] keyword[None] :
keyword[return]
keyword[while] keyword[True] :
identifier[connection] = keyword[await] identifier[qtm] . identifier[connect] ( identifier[qtm_ip] , literal[int] , identifier[version] = literal[string] )
keyword[if] identifier[connection] keyword[is] keyword[None] :
keyword[return]
keyword[await] identifier[connection] . identifier[get_state] ()
keyword[await] identifier[connection] . identifier[byte_order] ()
keyword[async] keyword[with] identifier[qtm] . identifier[TakeControl] ( identifier[connection] , literal[string] ):
identifier[result] = keyword[await] identifier[connection] . identifier[close] ()
keyword[if] identifier[result] == literal[string] :
keyword[await] identifier[connection] . identifier[await_event] ( identifier[qtm] . identifier[QRTEvent] . identifier[EventConnectionClosed] )
keyword[await] identifier[connection] . identifier[load] ( identifier[QTM_FILE] )
keyword[await] identifier[connection] . identifier[start] ( identifier[rtfromfile] = keyword[True] )
( keyword[await] identifier[connection] . identifier[get_current_frame] ()). identifier[get_3d_markers] ()
identifier[queue] = identifier[asyncio] . identifier[Queue] ()
identifier[asyncio] . identifier[ensure_future] ( identifier[packet_receiver] ( identifier[queue] ))
keyword[try] :
keyword[await] identifier[connection] . identifier[stream_frames] (
identifier[components] =[ literal[string] ], identifier[on_packet] = identifier[queue] . identifier[put_nowait]
)
keyword[except] identifier[qtm] . identifier[QRTCommandException] keyword[as] identifier[exception] :
identifier[LOG] . identifier[info] ( literal[string] , identifier[exception] )
keyword[await] identifier[connection] . identifier[stream_frames] (
identifier[components] =[ literal[string] ], identifier[on_packet] = identifier[queue] . identifier[put_nowait]
)
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] )
keyword[await] identifier[connection] . identifier[byte_order] ()
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] )
keyword[await] identifier[connection] . identifier[stream_frames_stop] ()
identifier[queue] . identifier[put_nowait] ( keyword[None] )
keyword[await] identifier[connection] . identifier[get_parameters] ( identifier[parameters] =[ literal[string] ])
keyword[await] identifier[connection] . identifier[stop] ()
keyword[await] identifier[connection] . identifier[await_event] ()
keyword[await] identifier[connection] . identifier[new] ()
keyword[await] identifier[connection] . identifier[await_event] ( identifier[qtm] . identifier[QRTEvent] . identifier[EventConnected] )
keyword[await] identifier[connection] . identifier[start] ()
keyword[await] identifier[connection] . identifier[await_event] ( identifier[qtm] . identifier[QRTEvent] . identifier[EventWaitingForTrigger] )
keyword[await] identifier[connection] . identifier[trig] ()
keyword[await] identifier[connection] . identifier[await_event] ( identifier[qtm] . identifier[QRTEvent] . identifier[EventCaptureStarted] )
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] )
keyword[await] identifier[connection] . identifier[set_qtm_event] ()
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] )
keyword[await] identifier[connection] . identifier[set_qtm_event] ( literal[string] )
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] )
keyword[await] identifier[connection] . identifier[stop] ()
keyword[await] identifier[connection] . identifier[await_event] ( identifier[qtm] . identifier[QRTEvent] . identifier[EventCaptureStopped] )
keyword[await] identifier[connection] . identifier[save] ( literal[string] )
keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] )
keyword[await] identifier[connection] . identifier[close] ()
identifier[connection] . identifier[disconnect] () | async def main(interface=None):
""" Main function """
qtm_ip = await choose_qtm_instance(interface)
if qtm_ip is None:
return # depends on [control=['if'], data=[]]
while True:
connection = await qtm.connect(qtm_ip, 22223, version='1.18')
if connection is None:
return # depends on [control=['if'], data=[]]
await connection.get_state()
await connection.byte_order()
async with qtm.TakeControl(connection, 'password'):
result = await connection.close()
if result == b'Closing connection':
await connection.await_event(qtm.QRTEvent.EventConnectionClosed) # depends on [control=['if'], data=[]]
await connection.load(QTM_FILE)
await connection.start(rtfromfile=True)
(await connection.get_current_frame()).get_3d_markers()
queue = asyncio.Queue()
asyncio.ensure_future(packet_receiver(queue))
try:
await connection.stream_frames(components=['incorrect'], on_packet=queue.put_nowait) # depends on [control=['try'], data=[]]
except qtm.QRTCommandException as exception:
LOG.info('exception %s', exception) # depends on [control=['except'], data=['exception']]
await connection.stream_frames(components=['3d'], on_packet=queue.put_nowait)
await asyncio.sleep(0.5)
await connection.byte_order()
await asyncio.sleep(0.5)
await connection.stream_frames_stop()
queue.put_nowait(None)
await connection.get_parameters(parameters=['3d'])
await connection.stop()
await connection.await_event()
await connection.new()
await connection.await_event(qtm.QRTEvent.EventConnected)
await connection.start()
await connection.await_event(qtm.QRTEvent.EventWaitingForTrigger)
await connection.trig()
await connection.await_event(qtm.QRTEvent.EventCaptureStarted)
await asyncio.sleep(0.5)
await connection.set_qtm_event()
await asyncio.sleep(0.001)
await connection.set_qtm_event('with_label')
await asyncio.sleep(0.5)
await connection.stop()
await connection.await_event(qtm.QRTEvent.EventCaptureStopped)
await connection.save('measurement.qtm')
await asyncio.sleep(3)
await connection.close()
connection.disconnect() # depends on [control=['while'], data=[]] |
def by_image_seq(blocks, image_seq):
"""Filter blocks to return only those associated with the provided image_seq number.
Argument:
List:blocks -- List of block objects to sort.
Int:image_seq -- image_seq number found in ec_hdr.
Returns:
List -- List of block indexes matching image_seq number.
"""
return list(filter(lambda block: blocks[block].ec_hdr.image_seq == image_seq, blocks)) | def function[by_image_seq, parameter[blocks, image_seq]]:
constant[Filter blocks to return only those associated with the provided image_seq number.
Argument:
List:blocks -- List of block objects to sort.
Int:image_seq -- image_seq number found in ec_hdr.
Returns:
List -- List of block indexes matching image_seq number.
]
return[call[name[list], parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da1b26af250>, name[blocks]]]]]] | keyword[def] identifier[by_image_seq] ( identifier[blocks] , identifier[image_seq] ):
literal[string]
keyword[return] identifier[list] ( identifier[filter] ( keyword[lambda] identifier[block] : identifier[blocks] [ identifier[block] ]. identifier[ec_hdr] . identifier[image_seq] == identifier[image_seq] , identifier[blocks] )) | def by_image_seq(blocks, image_seq):
"""Filter blocks to return only those associated with the provided image_seq number.
Argument:
List:blocks -- List of block objects to sort.
Int:image_seq -- image_seq number found in ec_hdr.
Returns:
List -- List of block indexes matching image_seq number.
"""
return list(filter(lambda block: blocks[block].ec_hdr.image_seq == image_seq, blocks)) |
def remove_short_sci_segs(self, minSegLength):
"""
Function to remove all science segments
shorter than a specific length. Also updates the file on disk to remove
these segments.
Parameters
-----------
minSegLength : int
Maximum length of science segments. Segments shorter than this will
be removed.
"""
newsegment_list = segments.segmentlist()
for key, seglist in self.segment_dict.items():
newsegment_list = segments.segmentlist()
for seg in seglist:
if abs(seg) > minSegLength:
newsegment_list.append(seg)
newsegment_list.coalesce()
self.segment_dict[key] = newsegment_list
self.to_segment_xml(override_file_if_exists=True) | def function[remove_short_sci_segs, parameter[self, minSegLength]]:
constant[
Function to remove all science segments
shorter than a specific length. Also updates the file on disk to remove
these segments.
Parameters
-----------
minSegLength : int
Maximum length of science segments. Segments shorter than this will
be removed.
]
variable[newsegment_list] assign[=] call[name[segments].segmentlist, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da2041d8a90>, <ast.Name object at 0x7da2041da0e0>]]] in starred[call[name[self].segment_dict.items, parameter[]]] begin[:]
variable[newsegment_list] assign[=] call[name[segments].segmentlist, parameter[]]
for taget[name[seg]] in starred[name[seglist]] begin[:]
if compare[call[name[abs], parameter[name[seg]]] greater[>] name[minSegLength]] begin[:]
call[name[newsegment_list].append, parameter[name[seg]]]
call[name[newsegment_list].coalesce, parameter[]]
call[name[self].segment_dict][name[key]] assign[=] name[newsegment_list]
call[name[self].to_segment_xml, parameter[]] | keyword[def] identifier[remove_short_sci_segs] ( identifier[self] , identifier[minSegLength] ):
literal[string]
identifier[newsegment_list] = identifier[segments] . identifier[segmentlist] ()
keyword[for] identifier[key] , identifier[seglist] keyword[in] identifier[self] . identifier[segment_dict] . identifier[items] ():
identifier[newsegment_list] = identifier[segments] . identifier[segmentlist] ()
keyword[for] identifier[seg] keyword[in] identifier[seglist] :
keyword[if] identifier[abs] ( identifier[seg] )> identifier[minSegLength] :
identifier[newsegment_list] . identifier[append] ( identifier[seg] )
identifier[newsegment_list] . identifier[coalesce] ()
identifier[self] . identifier[segment_dict] [ identifier[key] ]= identifier[newsegment_list]
identifier[self] . identifier[to_segment_xml] ( identifier[override_file_if_exists] = keyword[True] ) | def remove_short_sci_segs(self, minSegLength):
"""
Function to remove all science segments
shorter than a specific length. Also updates the file on disk to remove
these segments.
Parameters
-----------
minSegLength : int
Maximum length of science segments. Segments shorter than this will
be removed.
"""
newsegment_list = segments.segmentlist()
for (key, seglist) in self.segment_dict.items():
newsegment_list = segments.segmentlist()
for seg in seglist:
if abs(seg) > minSegLength:
newsegment_list.append(seg) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['seg']]
newsegment_list.coalesce()
self.segment_dict[key] = newsegment_list # depends on [control=['for'], data=[]]
self.to_segment_xml(override_file_if_exists=True) |
def delete(self, exchange='', if_unused=False):
"""Delete an Exchange.
:param str exchange: Exchange name
:param bool if_unused: Delete only if unused
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:rtype: dict
"""
if not compatibility.is_string(exchange):
raise AMQPInvalidArgument('exchange should be a string')
delete_frame = pamqp_exchange.Delete(exchange=exchange,
if_unused=if_unused)
return self._channel.rpc_request(delete_frame) | def function[delete, parameter[self, exchange, if_unused]]:
constant[Delete an Exchange.
:param str exchange: Exchange name
:param bool if_unused: Delete only if unused
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:rtype: dict
]
if <ast.UnaryOp object at 0x7da1b2346aa0> begin[:]
<ast.Raise object at 0x7da1b23474f0>
variable[delete_frame] assign[=] call[name[pamqp_exchange].Delete, parameter[]]
return[call[name[self]._channel.rpc_request, parameter[name[delete_frame]]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[exchange] = literal[string] , identifier[if_unused] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[compatibility] . identifier[is_string] ( identifier[exchange] ):
keyword[raise] identifier[AMQPInvalidArgument] ( literal[string] )
identifier[delete_frame] = identifier[pamqp_exchange] . identifier[Delete] ( identifier[exchange] = identifier[exchange] ,
identifier[if_unused] = identifier[if_unused] )
keyword[return] identifier[self] . identifier[_channel] . identifier[rpc_request] ( identifier[delete_frame] ) | def delete(self, exchange='', if_unused=False):
"""Delete an Exchange.
:param str exchange: Exchange name
:param bool if_unused: Delete only if unused
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:rtype: dict
"""
if not compatibility.is_string(exchange):
raise AMQPInvalidArgument('exchange should be a string') # depends on [control=['if'], data=[]]
delete_frame = pamqp_exchange.Delete(exchange=exchange, if_unused=if_unused)
return self._channel.rpc_request(delete_frame) |
def update_arg(self, arg, if_existent=None, **kwargs):
"""
Update the `add_argument` data for the given parameter
Parameters
----------
arg: str
The name of the function argument
if_existent: bool or None
If True, the argument is updated. If None (default), the argument
is only updated, if it exists. Otherwise, if False, the given
``**kwargs`` are only used if the argument is not yet existing
``**kwargs``
The keyword arguments any parameter for the
:meth:`argparse.ArgumentParser.add_argument` method
"""
if if_existent or (if_existent is None and
arg in self.unfinished_arguments):
self.unfinished_arguments[arg].update(kwargs)
elif not if_existent and if_existent is not None:
self.unfinished_arguments.setdefault(arg, kwargs) | def function[update_arg, parameter[self, arg, if_existent]]:
constant[
Update the `add_argument` data for the given parameter
Parameters
----------
arg: str
The name of the function argument
if_existent: bool or None
If True, the argument is updated. If None (default), the argument
is only updated, if it exists. Otherwise, if False, the given
``**kwargs`` are only used if the argument is not yet existing
``**kwargs``
The keyword arguments any parameter for the
:meth:`argparse.ArgumentParser.add_argument` method
]
if <ast.BoolOp object at 0x7da2054a48b0> begin[:]
call[call[name[self].unfinished_arguments][name[arg]].update, parameter[name[kwargs]]] | keyword[def] identifier[update_arg] ( identifier[self] , identifier[arg] , identifier[if_existent] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[if_existent] keyword[or] ( identifier[if_existent] keyword[is] keyword[None] keyword[and]
identifier[arg] keyword[in] identifier[self] . identifier[unfinished_arguments] ):
identifier[self] . identifier[unfinished_arguments] [ identifier[arg] ]. identifier[update] ( identifier[kwargs] )
keyword[elif] keyword[not] identifier[if_existent] keyword[and] identifier[if_existent] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[unfinished_arguments] . identifier[setdefault] ( identifier[arg] , identifier[kwargs] ) | def update_arg(self, arg, if_existent=None, **kwargs):
"""
Update the `add_argument` data for the given parameter
Parameters
----------
arg: str
The name of the function argument
if_existent: bool or None
If True, the argument is updated. If None (default), the argument
is only updated, if it exists. Otherwise, if False, the given
``**kwargs`` are only used if the argument is not yet existing
``**kwargs``
The keyword arguments any parameter for the
:meth:`argparse.ArgumentParser.add_argument` method
"""
if if_existent or (if_existent is None and arg in self.unfinished_arguments):
self.unfinished_arguments[arg].update(kwargs) # depends on [control=['if'], data=[]]
elif not if_existent and if_existent is not None:
self.unfinished_arguments.setdefault(arg, kwargs) # depends on [control=['if'], data=[]] |
def make_chunk_iter(
    stream, separator, limit=None, buffer_size=10 * 1024, cap_at_buffer=False
):
    """Works like :func:`make_line_iter` but accepts a separator
    which divides chunks. If you want newline based processing
    you should use :func:`make_line_iter` instead as it
    supports arbitrary newline markers.
    .. versionadded:: 0.8
    .. versionadded:: 0.9
       added support for iterators as input stream.
    .. versionadded:: 0.11.10
       added support for the `cap_at_buffer` parameter.
    :param stream: the stream or iterate to iterate over.
    :param separator: the separator that divides chunks.
    :param limit: the limit in bytes for the stream. (Usually
                  content length. Not necessary if the `stream`
                  is otherwise already limited).
    :param buffer_size: The optional buffer size.
    :param cap_at_buffer: if this is set chunks are split if they are longer
                          than the buffer size. Internally this is implemented
                          that the buffer size might be exhausted by a factor
                          of two however.
    """
    # Normalize the stream/iterator into an iterator of raw pieces of at
    # most `buffer_size` each.
    _iter = _make_chunk_iter(stream, limit, buffer_size)
    # Peek at the first piece to decide between text and bytes mode.
    first_item = next(_iter, "")
    if not first_item:
        # Empty input: the generator yields nothing at all.
        return
    # Push the peeked piece back in front of the iterator.
    _iter = chain((first_item,), _iter)
    if isinstance(first_item, text_type):
        separator = to_unicode(separator)
        # The capturing group makes re.split() keep each separator as its
        # own item in the result, so it can be recognised below.
        _split = re.compile(r"(%s)" % re.escape(separator)).split
        _join = u"".join
    else:
        separator = to_bytes(separator)
        _split = re.compile(b"(" + re.escape(separator) + b")").split
        _join = b"".join
    # `buffer` carries the fragments of the still-incomplete current chunk
    # over from one read to the next.
    buffer = []
    while 1:
        new_data = next(_iter, "")
        if not new_data:
            break
        # Separators show up as standalone items thanks to the group above.
        chunks = _split(new_data)
        new_buf = []
        buf_size = 0
        for item in chain(buffer, chunks):
            if item == separator:
                # A complete chunk has been collected; emit it.
                yield _join(new_buf)
                new_buf = []
                buf_size = 0
            else:
                buf_size += len(item)
                new_buf.append(item)
                if cap_at_buffer and buf_size >= buffer_size:
                    # Emit buffer_size-sized slices so pending data never
                    # grows far beyond the configured buffer size.
                    rv = _join(new_buf)
                    while len(rv) >= buffer_size:
                        yield rv[:buffer_size]
                        rv = rv[buffer_size:]
                    new_buf = [rv]
                    buf_size = len(rv)
        buffer = new_buf
    # Whatever trails the last separator forms the final chunk.
    if buffer:
        yield _join(buffer)
constant[Works like :func:`make_line_iter` but accepts a separator
which divides chunks. If you want newline based processing
you should use :func:`make_line_iter` instead as it
supports arbitrary newline markers.
.. versionadded:: 0.8
.. versionadded:: 0.9
added support for iterators as input stream.
.. versionadded:: 0.11.10
added support for the `cap_at_buffer` parameter.
:param stream: the stream or iterate to iterate over.
:param separator: the separator that divides chunks.
:param limit: the limit in bytes for the stream. (Usually
content length. Not necessary if the `stream`
is otherwise already limited).
:param buffer_size: The optional buffer size.
:param cap_at_buffer: if this is set chunks are split if they are longer
than the buffer size. Internally this is implemented
that the buffer size might be exhausted by a factor
of two however.
]
variable[_iter] assign[=] call[name[_make_chunk_iter], parameter[name[stream], name[limit], name[buffer_size]]]
variable[first_item] assign[=] call[name[next], parameter[name[_iter], constant[]]]
if <ast.UnaryOp object at 0x7da20c7c9810> begin[:]
return[None]
variable[_iter] assign[=] call[name[chain], parameter[tuple[[<ast.Name object at 0x7da20c7c9e10>]], name[_iter]]]
if call[name[isinstance], parameter[name[first_item], name[text_type]]] begin[:]
variable[separator] assign[=] call[name[to_unicode], parameter[name[separator]]]
variable[_split] assign[=] call[name[re].compile, parameter[binary_operation[constant[(%s)] <ast.Mod object at 0x7da2590d6920> call[name[re].escape, parameter[name[separator]]]]]].split
variable[_join] assign[=] constant[].join
variable[buffer] assign[=] list[[]]
while constant[1] begin[:]
variable[new_data] assign[=] call[name[next], parameter[name[_iter], constant[]]]
if <ast.UnaryOp object at 0x7da18f811f90> begin[:]
break
variable[chunks] assign[=] call[name[_split], parameter[name[new_data]]]
variable[new_buf] assign[=] list[[]]
variable[buf_size] assign[=] constant[0]
for taget[name[item]] in starred[call[name[chain], parameter[name[buffer], name[chunks]]]] begin[:]
if compare[name[item] equal[==] name[separator]] begin[:]
<ast.Yield object at 0x7da18f812fe0>
variable[new_buf] assign[=] list[[]]
variable[buf_size] assign[=] constant[0]
variable[buffer] assign[=] name[new_buf]
if name[buffer] begin[:]
<ast.Yield object at 0x7da18f8104c0> | keyword[def] identifier[make_chunk_iter] (
identifier[stream] , identifier[separator] , identifier[limit] = keyword[None] , identifier[buffer_size] = literal[int] * literal[int] , identifier[cap_at_buffer] = keyword[False]
):
literal[string]
identifier[_iter] = identifier[_make_chunk_iter] ( identifier[stream] , identifier[limit] , identifier[buffer_size] )
identifier[first_item] = identifier[next] ( identifier[_iter] , literal[string] )
keyword[if] keyword[not] identifier[first_item] :
keyword[return]
identifier[_iter] = identifier[chain] (( identifier[first_item] ,), identifier[_iter] )
keyword[if] identifier[isinstance] ( identifier[first_item] , identifier[text_type] ):
identifier[separator] = identifier[to_unicode] ( identifier[separator] )
identifier[_split] = identifier[re] . identifier[compile] ( literal[string] % identifier[re] . identifier[escape] ( identifier[separator] )). identifier[split]
identifier[_join] = literal[string] . identifier[join]
keyword[else] :
identifier[separator] = identifier[to_bytes] ( identifier[separator] )
identifier[_split] = identifier[re] . identifier[compile] ( literal[string] + identifier[re] . identifier[escape] ( identifier[separator] )+ literal[string] ). identifier[split]
identifier[_join] = literal[string] . identifier[join]
identifier[buffer] =[]
keyword[while] literal[int] :
identifier[new_data] = identifier[next] ( identifier[_iter] , literal[string] )
keyword[if] keyword[not] identifier[new_data] :
keyword[break]
identifier[chunks] = identifier[_split] ( identifier[new_data] )
identifier[new_buf] =[]
identifier[buf_size] = literal[int]
keyword[for] identifier[item] keyword[in] identifier[chain] ( identifier[buffer] , identifier[chunks] ):
keyword[if] identifier[item] == identifier[separator] :
keyword[yield] identifier[_join] ( identifier[new_buf] )
identifier[new_buf] =[]
identifier[buf_size] = literal[int]
keyword[else] :
identifier[buf_size] += identifier[len] ( identifier[item] )
identifier[new_buf] . identifier[append] ( identifier[item] )
keyword[if] identifier[cap_at_buffer] keyword[and] identifier[buf_size] >= identifier[buffer_size] :
identifier[rv] = identifier[_join] ( identifier[new_buf] )
keyword[while] identifier[len] ( identifier[rv] )>= identifier[buffer_size] :
keyword[yield] identifier[rv] [: identifier[buffer_size] ]
identifier[rv] = identifier[rv] [ identifier[buffer_size] :]
identifier[new_buf] =[ identifier[rv] ]
identifier[buf_size] = identifier[len] ( identifier[rv] )
identifier[buffer] = identifier[new_buf]
keyword[if] identifier[buffer] :
def make_chunk_iter(stream, separator, limit=None, buffer_size=10 * 1024, cap_at_buffer=False):
    """Works like :func:`make_line_iter` but accepts a separator
    which divides chunks. If you want newline based processing
    you should use :func:`make_line_iter` instead as it
    supports arbitrary newline markers.
    .. versionadded:: 0.8
    .. versionadded:: 0.9
       added support for iterators as input stream.
    .. versionadded:: 0.11.10
       added support for the `cap_at_buffer` parameter.
    :param stream: the stream or iterate to iterate over.
    :param separator: the separator that divides chunks.
    :param limit: the limit in bytes for the stream. (Usually
                  content length. Not necessary if the `stream`
                  is otherwise already limited).
    :param buffer_size: The optional buffer size.
    :param cap_at_buffer: if this is set chunks are split if they are longer
                          than the buffer size. Internally this is implemented
                          that the buffer size might be exhausted by a factor
                          of two however.
    """
    # NOTE(review): this copy appears to be a machine-annotated duplicate of
    # the plain make_chunk_iter in this file; the trailing
    # "# depends on [control=...]" markers look like generated control-flow
    # annotations, not hand-written comments — do not edit them by hand.
    _iter = _make_chunk_iter(stream, limit, buffer_size)
    first_item = next(_iter, '')
    if not first_item:
        return # depends on [control=['if'], data=[]]
    _iter = chain((first_item,), _iter)
    if isinstance(first_item, text_type):
        separator = to_unicode(separator)
        _split = re.compile('(%s)' % re.escape(separator)).split
        _join = u''.join # depends on [control=['if'], data=[]]
    else:
        separator = to_bytes(separator)
        _split = re.compile(b'(' + re.escape(separator) + b')').split
        _join = b''.join
    buffer = []
    while 1:
        new_data = next(_iter, '')
        if not new_data:
            break # depends on [control=['if'], data=[]]
        chunks = _split(new_data)
        new_buf = []
        buf_size = 0
        for item in chain(buffer, chunks):
            if item == separator:
                yield _join(new_buf)
                new_buf = []
                buf_size = 0 # depends on [control=['if'], data=[]]
            else:
                buf_size += len(item)
                new_buf.append(item)
                if cap_at_buffer and buf_size >= buffer_size:
                    rv = _join(new_buf)
                    while len(rv) >= buffer_size:
                        yield rv[:buffer_size]
                        rv = rv[buffer_size:] # depends on [control=['while'], data=['buffer_size']]
                    new_buf = [rv]
                    buf_size = len(rv) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
        buffer = new_buf # depends on [control=['while'], data=[]]
    if buffer:
        yield _join(buffer) # depends on [control=['if'], data=[]]
def install(level=None, **kw):
    """
    Enable colored terminal output for Python's :mod:`logging` module.
    :param level: The default logging level (an integer or a string with a
                  level name, defaults to :data:`DEFAULT_LOG_LEVEL`).
    :param logger: The logger to which the stream handler should be attached (a
                   :class:`~logging.Logger` object, defaults to the root logger).
    :param fmt: Set the logging format (a string like those accepted by
                :class:`~logging.Formatter`, defaults to
                :data:`DEFAULT_LOG_FORMAT`).
    :param datefmt: Set the date/time format (a string, defaults to
                    :data:`DEFAULT_DATE_FORMAT`).
    :param style: One of the characters ``%``, ``{`` or ``$`` (defaults to
                  :data:`DEFAULT_FORMAT_STYLE`). See the documentation of the
                  :class:`python3:logging.Formatter` class in Python 3.2+. On
                  older Python versions only ``%`` is supported.
    :param milliseconds: :data:`True` to show milliseconds like :mod:`logging`
                         does by default, :data:`False` to hide milliseconds
                         (the default is :data:`False`, see `#16`_).
    :param level_styles: A dictionary with custom level styles (defaults to
                         :data:`DEFAULT_LEVEL_STYLES`).
    :param field_styles: A dictionary with custom field styles (defaults to
                         :data:`DEFAULT_FIELD_STYLES`).
    :param stream: The stream where log messages should be written to (a
                   file-like object). This defaults to :data:`None` which
                   means :class:`StandardErrorHandler` is used.
    :param isatty: :data:`True` to use a :class:`ColoredFormatter`,
                   :data:`False` to use a normal :class:`~logging.Formatter`
                   (defaults to auto-detection using
                   :func:`~humanfriendly.terminal.terminal_supports_colors()`).
    :param reconfigure: If :data:`True` (the default) multiple calls to
                        :func:`coloredlogs.install()` will each override
                        the previous configuration.
    :param use_chroot: Refer to :class:`HostNameFilter`.
    :param programname: Refer to :class:`ProgramNameFilter`.
    :param syslog: If :data:`True` then :func:`.enable_system_logging()` will
                   be called without arguments (defaults to :data:`False`). The
                   `syslog` argument may also be a number or string, in this
                   case it is assumed to be a logging level which is passed on
                   to :func:`.enable_system_logging()`.
    The :func:`coloredlogs.install()` function is similar to
    :func:`logging.basicConfig()`, both functions take a lot of optional
    keyword arguments but try to do the right thing by default:
    1. If `reconfigure` is :data:`True` (it is by default) and an existing
       :class:`~logging.StreamHandler` is found that is connected to either
       :data:`~sys.stdout` or :data:`~sys.stderr` the handler will be removed.
       This means that first calling :func:`logging.basicConfig()` and then
       calling :func:`coloredlogs.install()` will replace the stream handler
       instead of adding a duplicate stream handler. If `reconfigure` is
       :data:`False` and an existing handler is found no further steps are
       taken (to avoid installing a duplicate stream handler).
    2. A :class:`~logging.StreamHandler` is created and connected to the stream
       given by the `stream` keyword argument (:data:`sys.stderr` by
       default). The stream handler's level is set to the value of the `level`
       keyword argument.
    3. A :class:`ColoredFormatter` is created if the `isatty` keyword argument
       allows it (or auto-detection allows it), otherwise a normal
       :class:`~logging.Formatter` is created. The formatter is initialized
       with the `fmt` and `datefmt` keyword arguments (or their computed
       defaults).
    4. :func:`HostNameFilter.install()` and :func:`ProgramNameFilter.install()`
       are called to enable the use of additional fields in the log format.
    5. If the logger's level is too restrictive it is relaxed (refer to `notes
       about log levels`_ for details).
    6. The formatter is added to the handler and the handler is added to the
       logger.
    .. _#16: https://github.com/xolox/python-coloredlogs/issues/16
    """
    # Resolve the keyword arguments that drive handler (re)installation
    # up front so they can be used throughout the function.
    logger = kw.get('logger') or logging.getLogger()
    reconfigure = kw.get('reconfigure', True)
    stream = kw.get('stream', None)
    style = check_style(kw.get('style') or DEFAULT_FORMAT_STYLE)
    # Get the log level from an argument, environment variable or default and
    # convert the names of log levels to numbers to enable numeric comparison.
    if level is None:
        level = os.environ.get('COLOREDLOGS_LOG_LEVEL', DEFAULT_LOG_LEVEL)
    level = level_to_number(level)
    # Remove any existing stream handler that writes to stdout or stderr, even
    # if the stream handler wasn't created by coloredlogs because multiple
    # stream handlers (in the same hierarchy) writing to stdout or stderr would
    # create duplicate output. `None' is a synonym for the possibly dynamic
    # value of the stderr attribute of the sys module.
    match_streams = ([sys.stdout, sys.stderr]
                     if stream in [sys.stdout, sys.stderr, None]
                     else [stream])
    # Only consider handlers that are attached to one of the streams above.
    match_handler = lambda handler: match_stream_handler(handler, match_streams)
    handler, logger = replace_handler(logger, match_handler, reconfigure)
    # Make sure reconfiguration is allowed or not relevant.
    if not (handler and not reconfigure):
        # Make it easy to enable system logging.
        syslog_enabled = kw.get('syslog')
        # We ignore the value `None' because it means the caller didn't opt in
        # to system logging and `False' because it means the caller explicitly
        # opted out of system logging.
        #
        # We never enable system logging on Windows because it is my impression
        # that SysLogHandler isn't intended to be used on Windows; I've had
        # reports of coloredlogs spewing extremely verbose errno 10057 warning
        # messages to the console (once for each log message I suppose).
        if syslog_enabled not in (None, False) and not WINDOWS:
            from coloredlogs.syslog import enable_system_logging
            if syslog_enabled is True:
                # If the caller passed syslog=True then we leave the choice of
                # default log level up to the coloredlogs.syslog module.
                enable_system_logging()
            else:
                # Values other than (None, True, False) are assumed to
                # represent a logging level for system logging.
                enable_system_logging(level=syslog_enabled)
        # Figure out whether we can use ANSI escape sequences.
        use_colors = kw.get('isatty', None)
        if use_colors or use_colors is None:
            if NEED_COLORAMA:
                try:
                    # On Windows we can only use ANSI escape
                    # sequences if Colorama is available.
                    import colorama
                    colorama.init()
                    use_colors = True
                except ImportError:
                    # If Colorama isn't available then we specifically
                    # shouldn't emit ANSI escape sequences!
                    use_colors = False
            elif use_colors is None:
                # Auto-detect terminal support on other platforms.
                use_colors = terminal_supports_colors(stream)
        # Create a stream handler.
        handler = logging.StreamHandler(stream) if stream else StandardErrorHandler()
        handler.setLevel(level)
        # Prepare the arguments to the formatter, allowing the caller to
        # customize the values of `fmt', `datefmt' and `style' as desired.
        formatter_options = dict(fmt=kw.get('fmt'), datefmt=kw.get('datefmt'))
        # Only pass the `style' argument to the formatter when the caller
        # provided an alternative logging format style. This prevents
        # TypeError exceptions on Python versions before 3.2.
        if style != DEFAULT_FORMAT_STYLE:
            formatter_options['style'] = style
        # Come up with a default log format?
        if not formatter_options['fmt']:
            # Use the log format defined by the environment variable
            # $COLOREDLOGS_LOG_FORMAT or fall back to the default.
            formatter_options['fmt'] = os.environ.get('COLOREDLOGS_LOG_FORMAT') or DEFAULT_LOG_FORMAT
        # If the caller didn't specify a date/time format we'll use the format
        # defined by the environment variable $COLOREDLOGS_DATE_FORMAT (or fall
        # back to the default).
        if not formatter_options['datefmt']:
            formatter_options['datefmt'] = os.environ.get('COLOREDLOGS_DATE_FORMAT') or DEFAULT_DATE_FORMAT
        # Python's logging module shows milliseconds by default through special
        # handling in the logging.Formatter.formatTime() method [1]. Because
        # coloredlogs always defines a `datefmt' it bypasses this special
        # handling, which is fine because ever since publishing coloredlogs
        # I've never needed millisecond precision ;-). However there are users
        # of coloredlogs that do want milliseconds to be shown [2] so we
        # provide a shortcut to make it easy.
        #
        # [1] https://stackoverflow.com/questions/6290739/python-logging-use-milliseconds-in-time-format
        # [2] https://github.com/xolox/python-coloredlogs/issues/16
        if kw.get('milliseconds'):
            parser = FormatStringParser(style=style)
            # Only rewrite the format when neither the log format nor the
            # date format already renders milliseconds.
            if not (parser.contains_field(formatter_options['fmt'], 'msecs') or
                    '%f' in formatter_options['datefmt']):
                pattern = parser.get_pattern('asctime')
                replacements = {'%': '%(msecs)03d', '{': '{msecs:03}', '$': '${msecs}'}
                # \g<0> re-emits the matched `asctime' field, so this appends
                # a ",<msecs>" part directly after each occurrence.
                formatter_options['fmt'] = pattern.sub(
                    r'\g<0>,' + replacements[style],
                    formatter_options['fmt'],
                )
        # Do we need to make %(hostname) available to the formatter?
        HostNameFilter.install(
            fmt=formatter_options['fmt'],
            handler=handler,
            style=style,
            use_chroot=kw.get('use_chroot', True),
        )
        # Do we need to make %(programname) available to the formatter?
        ProgramNameFilter.install(
            fmt=formatter_options['fmt'],
            handler=handler,
            programname=kw.get('programname'),
            style=style,
        )
        # Inject additional formatter arguments specific to ColoredFormatter?
        if use_colors:
            for name, environment_name in (('field_styles', 'COLOREDLOGS_FIELD_STYLES'),
                                           ('level_styles', 'COLOREDLOGS_LEVEL_STYLES')):
                value = kw.get(name)
                if value is None:
                    # If no styles have been specified we'll fall back
                    # to the styles defined by the environment variable.
                    environment_value = os.environ.get(environment_name)
                    if environment_value is not None:
                        value = parse_encoded_styles(environment_value)
                if value is not None:
                    formatter_options[name] = value
        # Create a (possibly colored) formatter.
        formatter_type = ColoredFormatter if use_colors else BasicFormatter
        handler.setFormatter(formatter_type(**formatter_options))
        # Adjust the level of the selected logger.
        adjust_level(logger, level)
        # Install the stream handler.
        logger.addHandler(handler)
constant[
Enable colored terminal output for Python's :mod:`logging` module.
:param level: The default logging level (an integer or a string with a
level name, defaults to :data:`DEFAULT_LOG_LEVEL`).
:param logger: The logger to which the stream handler should be attached (a
:class:`~logging.Logger` object, defaults to the root logger).
:param fmt: Set the logging format (a string like those accepted by
:class:`~logging.Formatter`, defaults to
:data:`DEFAULT_LOG_FORMAT`).
:param datefmt: Set the date/time format (a string, defaults to
:data:`DEFAULT_DATE_FORMAT`).
:param style: One of the characters ``%``, ``{`` or ``$`` (defaults to
:data:`DEFAULT_FORMAT_STYLE`). See the documentation of the
:class:`python3:logging.Formatter` class in Python 3.2+. On
older Python versions only ``%`` is supported.
:param milliseconds: :data:`True` to show milliseconds like :mod:`logging`
does by default, :data:`False` to hide milliseconds
(the default is :data:`False`, see `#16`_).
:param level_styles: A dictionary with custom level styles (defaults to
:data:`DEFAULT_LEVEL_STYLES`).
:param field_styles: A dictionary with custom field styles (defaults to
:data:`DEFAULT_FIELD_STYLES`).
:param stream: The stream where log messages should be written to (a
file-like object). This defaults to :data:`None` which
means :class:`StandardErrorHandler` is used.
:param isatty: :data:`True` to use a :class:`ColoredFormatter`,
:data:`False` to use a normal :class:`~logging.Formatter`
(defaults to auto-detection using
:func:`~humanfriendly.terminal.terminal_supports_colors()`).
:param reconfigure: If :data:`True` (the default) multiple calls to
:func:`coloredlogs.install()` will each override
the previous configuration.
:param use_chroot: Refer to :class:`HostNameFilter`.
:param programname: Refer to :class:`ProgramNameFilter`.
:param syslog: If :data:`True` then :func:`.enable_system_logging()` will
be called without arguments (defaults to :data:`False`). The
`syslog` argument may also be a number or string, in this
case it is assumed to be a logging level which is passed on
to :func:`.enable_system_logging()`.
The :func:`coloredlogs.install()` function is similar to
:func:`logging.basicConfig()`, both functions take a lot of optional
keyword arguments but try to do the right thing by default:
1. If `reconfigure` is :data:`True` (it is by default) and an existing
:class:`~logging.StreamHandler` is found that is connected to either
:data:`~sys.stdout` or :data:`~sys.stderr` the handler will be removed.
This means that first calling :func:`logging.basicConfig()` and then
calling :func:`coloredlogs.install()` will replace the stream handler
instead of adding a duplicate stream handler. If `reconfigure` is
:data:`False` and an existing handler is found no further steps are
taken (to avoid installing a duplicate stream handler).
2. A :class:`~logging.StreamHandler` is created and connected to the stream
given by the `stream` keyword argument (:data:`sys.stderr` by
default). The stream handler's level is set to the value of the `level`
keyword argument.
3. A :class:`ColoredFormatter` is created if the `isatty` keyword argument
allows it (or auto-detection allows it), otherwise a normal
:class:`~logging.Formatter` is created. The formatter is initialized
with the `fmt` and `datefmt` keyword arguments (or their computed
defaults).
4. :func:`HostNameFilter.install()` and :func:`ProgramNameFilter.install()`
are called to enable the use of additional fields in the log format.
5. If the logger's level is too restrictive it is relaxed (refer to `notes
about log levels`_ for details).
6. The formatter is added to the handler and the handler is added to the
logger.
.. _#16: https://github.com/xolox/python-coloredlogs/issues/16
]
variable[logger] assign[=] <ast.BoolOp object at 0x7da1b060fdc0>
variable[reconfigure] assign[=] call[name[kw].get, parameter[constant[reconfigure], constant[True]]]
variable[stream] assign[=] call[name[kw].get, parameter[constant[stream], constant[None]]]
variable[style] assign[=] call[name[check_style], parameter[<ast.BoolOp object at 0x7da1b060f8e0>]]
if compare[name[level] is constant[None]] begin[:]
variable[level] assign[=] call[name[os].environ.get, parameter[constant[COLOREDLOGS_LOG_LEVEL], name[DEFAULT_LOG_LEVEL]]]
variable[level] assign[=] call[name[level_to_number], parameter[name[level]]]
variable[match_streams] assign[=] <ast.IfExp object at 0x7da1b060f430>
variable[match_handler] assign[=] <ast.Lambda object at 0x7da1b060f0d0>
<ast.Tuple object at 0x7da1b060ef50> assign[=] call[name[replace_handler], parameter[name[logger], name[match_handler], name[reconfigure]]]
if <ast.UnaryOp object at 0x7da1b060eda0> begin[:]
variable[syslog_enabled] assign[=] call[name[kw].get, parameter[constant[syslog]]]
if <ast.BoolOp object at 0x7da1b060eb60> begin[:]
from relative_module[coloredlogs.syslog] import module[enable_system_logging]
if compare[name[syslog_enabled] is constant[True]] begin[:]
call[name[enable_system_logging], parameter[]]
variable[use_colors] assign[=] call[name[kw].get, parameter[constant[isatty], constant[None]]]
if <ast.BoolOp object at 0x7da1b060d390> begin[:]
if name[NEED_COLORAMA] begin[:]
<ast.Try object at 0x7da1b060d240>
variable[handler] assign[=] <ast.IfExp object at 0x7da1b060cd00>
call[name[handler].setLevel, parameter[name[level]]]
variable[formatter_options] assign[=] call[name[dict], parameter[]]
if compare[name[style] not_equal[!=] name[DEFAULT_FORMAT_STYLE]] begin[:]
call[name[formatter_options]][constant[style]] assign[=] name[style]
if <ast.UnaryOp object at 0x7da1b060c5e0> begin[:]
call[name[formatter_options]][constant[fmt]] assign[=] <ast.BoolOp object at 0x7da1b060c460>
if <ast.UnaryOp object at 0x7da1b060c2e0> begin[:]
call[name[formatter_options]][constant[datefmt]] assign[=] <ast.BoolOp object at 0x7da1b060c160>
if call[name[kw].get, parameter[constant[milliseconds]]] begin[:]
variable[parser] assign[=] call[name[FormatStringParser], parameter[]]
if <ast.UnaryOp object at 0x7da1b068bd90> begin[:]
variable[pattern] assign[=] call[name[parser].get_pattern, parameter[constant[asctime]]]
variable[replacements] assign[=] dictionary[[<ast.Constant object at 0x7da1b068b940>, <ast.Constant object at 0x7da1b068b910>, <ast.Constant object at 0x7da1b068b8e0>], [<ast.Constant object at 0x7da1b068b8b0>, <ast.Constant object at 0x7da1b068b880>, <ast.Constant object at 0x7da1b068b850>]]
call[name[formatter_options]][constant[fmt]] assign[=] call[name[pattern].sub, parameter[binary_operation[constant[\g<0>,] + call[name[replacements]][name[style]]], call[name[formatter_options]][constant[fmt]]]]
call[name[HostNameFilter].install, parameter[]]
call[name[ProgramNameFilter].install, parameter[]]
if name[use_colors] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0688f40>, <ast.Name object at 0x7da1b0688f70>]]] in starred[tuple[[<ast.Tuple object at 0x7da1b0688fd0>, <ast.Tuple object at 0x7da1b0689060>]]] begin[:]
variable[value] assign[=] call[name[kw].get, parameter[name[name]]]
if compare[name[value] is constant[None]] begin[:]
variable[environment_value] assign[=] call[name[os].environ.get, parameter[name[environment_name]]]
if compare[name[environment_value] is_not constant[None]] begin[:]
variable[value] assign[=] call[name[parse_encoded_styles], parameter[name[environment_value]]]
if compare[name[value] is_not constant[None]] begin[:]
call[name[formatter_options]][name[name]] assign[=] name[value]
variable[formatter_type] assign[=] <ast.IfExp object at 0x7da1b068a2c0>
call[name[handler].setFormatter, parameter[call[name[formatter_type], parameter[]]]]
call[name[adjust_level], parameter[name[logger], name[level]]]
call[name[logger].addHandler, parameter[name[handler]]] | keyword[def] identifier[install] ( identifier[level] = keyword[None] ,** identifier[kw] ):
literal[string]
identifier[logger] = identifier[kw] . identifier[get] ( literal[string] ) keyword[or] identifier[logging] . identifier[getLogger] ()
identifier[reconfigure] = identifier[kw] . identifier[get] ( literal[string] , keyword[True] )
identifier[stream] = identifier[kw] . identifier[get] ( literal[string] , keyword[None] )
identifier[style] = identifier[check_style] ( identifier[kw] . identifier[get] ( literal[string] ) keyword[or] identifier[DEFAULT_FORMAT_STYLE] )
keyword[if] identifier[level] keyword[is] keyword[None] :
identifier[level] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , identifier[DEFAULT_LOG_LEVEL] )
identifier[level] = identifier[level_to_number] ( identifier[level] )
identifier[match_streams] =([ identifier[sys] . identifier[stdout] , identifier[sys] . identifier[stderr] ]
keyword[if] identifier[stream] keyword[in] [ identifier[sys] . identifier[stdout] , identifier[sys] . identifier[stderr] , keyword[None] ]
keyword[else] [ identifier[stream] ])
identifier[match_handler] = keyword[lambda] identifier[handler] : identifier[match_stream_handler] ( identifier[handler] , identifier[match_streams] )
identifier[handler] , identifier[logger] = identifier[replace_handler] ( identifier[logger] , identifier[match_handler] , identifier[reconfigure] )
keyword[if] keyword[not] ( identifier[handler] keyword[and] keyword[not] identifier[reconfigure] ):
identifier[syslog_enabled] = identifier[kw] . identifier[get] ( literal[string] )
keyword[if] identifier[syslog_enabled] keyword[not] keyword[in] ( keyword[None] , keyword[False] ) keyword[and] keyword[not] identifier[WINDOWS] :
keyword[from] identifier[coloredlogs] . identifier[syslog] keyword[import] identifier[enable_system_logging]
keyword[if] identifier[syslog_enabled] keyword[is] keyword[True] :
identifier[enable_system_logging] ()
keyword[else] :
identifier[enable_system_logging] ( identifier[level] = identifier[syslog_enabled] )
identifier[use_colors] = identifier[kw] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[use_colors] keyword[or] identifier[use_colors] keyword[is] keyword[None] :
keyword[if] identifier[NEED_COLORAMA] :
keyword[try] :
keyword[import] identifier[colorama]
identifier[colorama] . identifier[init] ()
identifier[use_colors] = keyword[True]
keyword[except] identifier[ImportError] :
identifier[use_colors] = keyword[False]
keyword[elif] identifier[use_colors] keyword[is] keyword[None] :
identifier[use_colors] = identifier[terminal_supports_colors] ( identifier[stream] )
identifier[handler] = identifier[logging] . identifier[StreamHandler] ( identifier[stream] ) keyword[if] identifier[stream] keyword[else] identifier[StandardErrorHandler] ()
identifier[handler] . identifier[setLevel] ( identifier[level] )
identifier[formatter_options] = identifier[dict] ( identifier[fmt] = identifier[kw] . identifier[get] ( literal[string] ), identifier[datefmt] = identifier[kw] . identifier[get] ( literal[string] ))
keyword[if] identifier[style] != identifier[DEFAULT_FORMAT_STYLE] :
identifier[formatter_options] [ literal[string] ]= identifier[style]
keyword[if] keyword[not] identifier[formatter_options] [ literal[string] ]:
identifier[formatter_options] [ literal[string] ]= identifier[os] . identifier[environ] . identifier[get] ( literal[string] ) keyword[or] identifier[DEFAULT_LOG_FORMAT]
keyword[if] keyword[not] identifier[formatter_options] [ literal[string] ]:
identifier[formatter_options] [ literal[string] ]= identifier[os] . identifier[environ] . identifier[get] ( literal[string] ) keyword[or] identifier[DEFAULT_DATE_FORMAT]
keyword[if] identifier[kw] . identifier[get] ( literal[string] ):
identifier[parser] = identifier[FormatStringParser] ( identifier[style] = identifier[style] )
keyword[if] keyword[not] ( identifier[parser] . identifier[contains_field] ( identifier[formatter_options] [ literal[string] ], literal[string] ) keyword[or]
literal[string] keyword[in] identifier[formatter_options] [ literal[string] ]):
identifier[pattern] = identifier[parser] . identifier[get_pattern] ( literal[string] )
identifier[replacements] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }
identifier[formatter_options] [ literal[string] ]= identifier[pattern] . identifier[sub] (
literal[string] + identifier[replacements] [ identifier[style] ],
identifier[formatter_options] [ literal[string] ],
)
identifier[HostNameFilter] . identifier[install] (
identifier[fmt] = identifier[formatter_options] [ literal[string] ],
identifier[handler] = identifier[handler] ,
identifier[style] = identifier[style] ,
identifier[use_chroot] = identifier[kw] . identifier[get] ( literal[string] , keyword[True] ),
)
identifier[ProgramNameFilter] . identifier[install] (
identifier[fmt] = identifier[formatter_options] [ literal[string] ],
identifier[handler] = identifier[handler] ,
identifier[programname] = identifier[kw] . identifier[get] ( literal[string] ),
identifier[style] = identifier[style] ,
)
keyword[if] identifier[use_colors] :
keyword[for] identifier[name] , identifier[environment_name] keyword[in] (( literal[string] , literal[string] ),
( literal[string] , literal[string] )):
identifier[value] = identifier[kw] . identifier[get] ( identifier[name] )
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[environment_value] = identifier[os] . identifier[environ] . identifier[get] ( identifier[environment_name] )
keyword[if] identifier[environment_value] keyword[is] keyword[not] keyword[None] :
identifier[value] = identifier[parse_encoded_styles] ( identifier[environment_value] )
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[formatter_options] [ identifier[name] ]= identifier[value]
identifier[formatter_type] = identifier[ColoredFormatter] keyword[if] identifier[use_colors] keyword[else] identifier[BasicFormatter]
identifier[handler] . identifier[setFormatter] ( identifier[formatter_type] (** identifier[formatter_options] ))
identifier[adjust_level] ( identifier[logger] , identifier[level] )
identifier[logger] . identifier[addHandler] ( identifier[handler] ) | def install(level=None, **kw):
"""
Enable colored terminal output for Python's :mod:`logging` module.
:param level: The default logging level (an integer or a string with a
level name, defaults to :data:`DEFAULT_LOG_LEVEL`).
:param logger: The logger to which the stream handler should be attached (a
:class:`~logging.Logger` object, defaults to the root logger).
:param fmt: Set the logging format (a string like those accepted by
:class:`~logging.Formatter`, defaults to
:data:`DEFAULT_LOG_FORMAT`).
:param datefmt: Set the date/time format (a string, defaults to
:data:`DEFAULT_DATE_FORMAT`).
:param style: One of the characters ``%``, ``{`` or ``$`` (defaults to
:data:`DEFAULT_FORMAT_STYLE`). See the documentation of the
:class:`python3:logging.Formatter` class in Python 3.2+. On
older Python versions only ``%`` is supported.
:param milliseconds: :data:`True` to show milliseconds like :mod:`logging`
does by default, :data:`False` to hide milliseconds
(the default is :data:`False`, see `#16`_).
:param level_styles: A dictionary with custom level styles (defaults to
:data:`DEFAULT_LEVEL_STYLES`).
:param field_styles: A dictionary with custom field styles (defaults to
:data:`DEFAULT_FIELD_STYLES`).
:param stream: The stream where log messages should be written to (a
file-like object). This defaults to :data:`None` which
means :class:`StandardErrorHandler` is used.
:param isatty: :data:`True` to use a :class:`ColoredFormatter`,
:data:`False` to use a normal :class:`~logging.Formatter`
(defaults to auto-detection using
:func:`~humanfriendly.terminal.terminal_supports_colors()`).
:param reconfigure: If :data:`True` (the default) multiple calls to
:func:`coloredlogs.install()` will each override
the previous configuration.
:param use_chroot: Refer to :class:`HostNameFilter`.
:param programname: Refer to :class:`ProgramNameFilter`.
:param syslog: If :data:`True` then :func:`.enable_system_logging()` will
be called without arguments (defaults to :data:`False`). The
`syslog` argument may also be a number or string, in this
case it is assumed to be a logging level which is passed on
to :func:`.enable_system_logging()`.
The :func:`coloredlogs.install()` function is similar to
:func:`logging.basicConfig()`, both functions take a lot of optional
keyword arguments but try to do the right thing by default:
1. If `reconfigure` is :data:`True` (it is by default) and an existing
:class:`~logging.StreamHandler` is found that is connected to either
:data:`~sys.stdout` or :data:`~sys.stderr` the handler will be removed.
This means that first calling :func:`logging.basicConfig()` and then
calling :func:`coloredlogs.install()` will replace the stream handler
instead of adding a duplicate stream handler. If `reconfigure` is
:data:`False` and an existing handler is found no further steps are
taken (to avoid installing a duplicate stream handler).
2. A :class:`~logging.StreamHandler` is created and connected to the stream
given by the `stream` keyword argument (:data:`sys.stderr` by
default). The stream handler's level is set to the value of the `level`
keyword argument.
3. A :class:`ColoredFormatter` is created if the `isatty` keyword argument
allows it (or auto-detection allows it), otherwise a normal
:class:`~logging.Formatter` is created. The formatter is initialized
with the `fmt` and `datefmt` keyword arguments (or their computed
defaults).
4. :func:`HostNameFilter.install()` and :func:`ProgramNameFilter.install()`
are called to enable the use of additional fields in the log format.
5. If the logger's level is too restrictive it is relaxed (refer to `notes
about log levels`_ for details).
6. The formatter is added to the handler and the handler is added to the
logger.
.. _#16: https://github.com/xolox/python-coloredlogs/issues/16
"""
logger = kw.get('logger') or logging.getLogger()
reconfigure = kw.get('reconfigure', True)
stream = kw.get('stream', None)
style = check_style(kw.get('style') or DEFAULT_FORMAT_STYLE)
# Get the log level from an argument, environment variable or default and
# convert the names of log levels to numbers to enable numeric comparison.
if level is None:
level = os.environ.get('COLOREDLOGS_LOG_LEVEL', DEFAULT_LOG_LEVEL) # depends on [control=['if'], data=['level']]
level = level_to_number(level)
# Remove any existing stream handler that writes to stdout or stderr, even
# if the stream handler wasn't created by coloredlogs because multiple
# stream handlers (in the same hierarchy) writing to stdout or stderr would
# create duplicate output. `None' is a synonym for the possibly dynamic
# value of the stderr attribute of the sys module.
match_streams = [sys.stdout, sys.stderr] if stream in [sys.stdout, sys.stderr, None] else [stream]
match_handler = lambda handler: match_stream_handler(handler, match_streams)
(handler, logger) = replace_handler(logger, match_handler, reconfigure)
# Make sure reconfiguration is allowed or not relevant.
if not (handler and (not reconfigure)):
# Make it easy to enable system logging.
syslog_enabled = kw.get('syslog')
# We ignore the value `None' because it means the caller didn't opt in
# to system logging and `False' because it means the caller explicitly
# opted out of system logging.
#
# We never enable system logging on Windows because it is my impression
# that SysLogHandler isn't intended to be used on Windows; I've had
# reports of coloredlogs spewing extremely verbose errno 10057 warning
# messages to the console (once for each log message I suppose).
if syslog_enabled not in (None, False) and (not WINDOWS):
from coloredlogs.syslog import enable_system_logging
if syslog_enabled is True:
# If the caller passed syslog=True then we leave the choice of
# default log level up to the coloredlogs.syslog module.
enable_system_logging() # depends on [control=['if'], data=[]]
else:
# Values other than (None, True, False) are assumed to
# represent a logging level for system logging.
enable_system_logging(level=syslog_enabled) # depends on [control=['if'], data=[]]
# Figure out whether we can use ANSI escape sequences.
use_colors = kw.get('isatty', None)
if use_colors or use_colors is None:
if NEED_COLORAMA:
try:
# On Windows we can only use ANSI escape
# sequences if Colorama is available.
import colorama
colorama.init()
use_colors = True # depends on [control=['try'], data=[]]
except ImportError:
# If Colorama isn't available then we specifically
# shouldn't emit ANSI escape sequences!
use_colors = False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif use_colors is None:
# Auto-detect terminal support on other platforms.
use_colors = terminal_supports_colors(stream) # depends on [control=['if'], data=['use_colors']] # depends on [control=['if'], data=[]]
# Create a stream handler.
handler = logging.StreamHandler(stream) if stream else StandardErrorHandler()
handler.setLevel(level)
# Prepare the arguments to the formatter, allowing the caller to
# customize the values of `fmt', `datefmt' and `style' as desired.
formatter_options = dict(fmt=kw.get('fmt'), datefmt=kw.get('datefmt'))
# Only pass the `style' argument to the formatter when the caller
# provided an alternative logging format style. This prevents
# TypeError exceptions on Python versions before 3.2.
if style != DEFAULT_FORMAT_STYLE:
formatter_options['style'] = style # depends on [control=['if'], data=['style']]
# Come up with a default log format?
if not formatter_options['fmt']:
# Use the log format defined by the environment variable
# $COLOREDLOGS_LOG_FORMAT or fall back to the default.
formatter_options['fmt'] = os.environ.get('COLOREDLOGS_LOG_FORMAT') or DEFAULT_LOG_FORMAT # depends on [control=['if'], data=[]]
# If the caller didn't specify a date/time format we'll use the format
# defined by the environment variable $COLOREDLOGS_DATE_FORMAT (or fall
# back to the default).
if not formatter_options['datefmt']:
formatter_options['datefmt'] = os.environ.get('COLOREDLOGS_DATE_FORMAT') or DEFAULT_DATE_FORMAT # depends on [control=['if'], data=[]]
# Python's logging module shows milliseconds by default through special
# handling in the logging.Formatter.formatTime() method [1]. Because
# coloredlogs always defines a `datefmt' it bypasses this special
# handling, which is fine because ever since publishing coloredlogs
# I've never needed millisecond precision ;-). However there are users
# of coloredlogs that do want milliseconds to be shown [2] so we
# provide a shortcut to make it easy.
#
# [1] https://stackoverflow.com/questions/6290739/python-logging-use-milliseconds-in-time-format
# [2] https://github.com/xolox/python-coloredlogs/issues/16
if kw.get('milliseconds'):
parser = FormatStringParser(style=style)
if not (parser.contains_field(formatter_options['fmt'], 'msecs') or '%f' in formatter_options['datefmt']):
pattern = parser.get_pattern('asctime')
replacements = {'%': '%(msecs)03d', '{': '{msecs:03}', '$': '${msecs}'}
formatter_options['fmt'] = pattern.sub('\\g<0>,' + replacements[style], formatter_options['fmt']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Do we need to make %(hostname) available to the formatter?
HostNameFilter.install(fmt=formatter_options['fmt'], handler=handler, style=style, use_chroot=kw.get('use_chroot', True))
# Do we need to make %(programname) available to the formatter?
ProgramNameFilter.install(fmt=formatter_options['fmt'], handler=handler, programname=kw.get('programname'), style=style)
# Inject additional formatter arguments specific to ColoredFormatter?
if use_colors:
for (name, environment_name) in (('field_styles', 'COLOREDLOGS_FIELD_STYLES'), ('level_styles', 'COLOREDLOGS_LEVEL_STYLES')):
value = kw.get(name)
if value is None:
# If no styles have been specified we'll fall back
# to the styles defined by the environment variable.
environment_value = os.environ.get(environment_name)
if environment_value is not None:
value = parse_encoded_styles(environment_value) # depends on [control=['if'], data=['environment_value']] # depends on [control=['if'], data=['value']]
if value is not None:
formatter_options[name] = value # depends on [control=['if'], data=['value']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
# Create a (possibly colored) formatter.
formatter_type = ColoredFormatter if use_colors else BasicFormatter
handler.setFormatter(formatter_type(**formatter_options))
# Adjust the level of the selected logger.
adjust_level(logger, level)
# Install the stream handler.
logger.addHandler(handler) # depends on [control=['if'], data=[]] |
def leave_transaction_management(using=None):
"""
Leaves transaction management for a running thread. A dirty flag is carried
over to the surrounding block, as a commit will commit all changes, even
those from outside. (Commits are on connection level.)
"""
if using is None:
for using in tldap.backend.connections:
connection = tldap.backend.connections[using]
connection.leave_transaction_management()
return
connection = tldap.backend.connections[using]
connection.leave_transaction_management() | def function[leave_transaction_management, parameter[using]]:
constant[
Leaves transaction management for a running thread. A dirty flag is carried
over to the surrounding block, as a commit will commit all changes, even
those from outside. (Commits are on connection level.)
]
if compare[name[using] is constant[None]] begin[:]
for taget[name[using]] in starred[name[tldap].backend.connections] begin[:]
variable[connection] assign[=] call[name[tldap].backend.connections][name[using]]
call[name[connection].leave_transaction_management, parameter[]]
return[None]
variable[connection] assign[=] call[name[tldap].backend.connections][name[using]]
call[name[connection].leave_transaction_management, parameter[]] | keyword[def] identifier[leave_transaction_management] ( identifier[using] = keyword[None] ):
literal[string]
keyword[if] identifier[using] keyword[is] keyword[None] :
keyword[for] identifier[using] keyword[in] identifier[tldap] . identifier[backend] . identifier[connections] :
identifier[connection] = identifier[tldap] . identifier[backend] . identifier[connections] [ identifier[using] ]
identifier[connection] . identifier[leave_transaction_management] ()
keyword[return]
identifier[connection] = identifier[tldap] . identifier[backend] . identifier[connections] [ identifier[using] ]
identifier[connection] . identifier[leave_transaction_management] () | def leave_transaction_management(using=None):
"""
Leaves transaction management for a running thread. A dirty flag is carried
over to the surrounding block, as a commit will commit all changes, even
those from outside. (Commits are on connection level.)
"""
if using is None:
for using in tldap.backend.connections:
connection = tldap.backend.connections[using]
connection.leave_transaction_management() # depends on [control=['for'], data=['using']]
return # depends on [control=['if'], data=['using']]
connection = tldap.backend.connections[using]
connection.leave_transaction_management() |
def python_to_euc(uni_char, as_bytes=False):
"""
Return EUC character from a Python Unicode character.
Converts a one character Python unicode string (e.g. u'\\u4e00') to the
corresponding EUC hex ('d2bb').
"""
euc = repr(uni_char.encode("gb2312"))[1:-1].replace("\\x", "").strip("'")
if as_bytes:
euc = euc.encode('utf-8')
assert isinstance(euc, bytes)
return euc | def function[python_to_euc, parameter[uni_char, as_bytes]]:
constant[
Return EUC character from a Python Unicode character.
Converts a one character Python unicode string (e.g. u'\u4e00') to the
corresponding EUC hex ('d2bb').
]
variable[euc] assign[=] call[call[call[call[name[repr], parameter[call[name[uni_char].encode, parameter[constant[gb2312]]]]]][<ast.Slice object at 0x7da1b1991210>].replace, parameter[constant[\x], constant[]]].strip, parameter[constant[']]]
if name[as_bytes] begin[:]
variable[euc] assign[=] call[name[euc].encode, parameter[constant[utf-8]]]
assert[call[name[isinstance], parameter[name[euc], name[bytes]]]]
return[name[euc]] | keyword[def] identifier[python_to_euc] ( identifier[uni_char] , identifier[as_bytes] = keyword[False] ):
literal[string]
identifier[euc] = identifier[repr] ( identifier[uni_char] . identifier[encode] ( literal[string] ))[ literal[int] :- literal[int] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ( literal[string] )
keyword[if] identifier[as_bytes] :
identifier[euc] = identifier[euc] . identifier[encode] ( literal[string] )
keyword[assert] identifier[isinstance] ( identifier[euc] , identifier[bytes] )
keyword[return] identifier[euc] | def python_to_euc(uni_char, as_bytes=False):
"""
Return EUC character from a Python Unicode character.
Converts a one character Python unicode string (e.g. u'\\u4e00') to the
corresponding EUC hex ('d2bb').
"""
euc = repr(uni_char.encode('gb2312'))[1:-1].replace('\\x', '').strip("'")
if as_bytes:
euc = euc.encode('utf-8')
assert isinstance(euc, bytes) # depends on [control=['if'], data=[]]
return euc |
def hash(mapping, bind, values):
""" Generate a sha1 for each of the given values. """
for v in values:
if v is None:
continue
if not isinstance(v, six.string_types):
v = six.text_type(v)
yield sha1(v.encode('utf-8')).hexdigest() | def function[hash, parameter[mapping, bind, values]]:
constant[ Generate a sha1 for each of the given values. ]
for taget[name[v]] in starred[name[values]] begin[:]
if compare[name[v] is constant[None]] begin[:]
continue
if <ast.UnaryOp object at 0x7da18c4cc0d0> begin[:]
variable[v] assign[=] call[name[six].text_type, parameter[name[v]]]
<ast.Yield object at 0x7da18c4ce200> | keyword[def] identifier[hash] ( identifier[mapping] , identifier[bind] , identifier[values] ):
literal[string]
keyword[for] identifier[v] keyword[in] identifier[values] :
keyword[if] identifier[v] keyword[is] keyword[None] :
keyword[continue]
keyword[if] keyword[not] identifier[isinstance] ( identifier[v] , identifier[six] . identifier[string_types] ):
identifier[v] = identifier[six] . identifier[text_type] ( identifier[v] )
keyword[yield] identifier[sha1] ( identifier[v] . identifier[encode] ( literal[string] )). identifier[hexdigest] () | def hash(mapping, bind, values):
""" Generate a sha1 for each of the given values. """
for v in values:
if v is None:
continue # depends on [control=['if'], data=[]]
if not isinstance(v, six.string_types):
v = six.text_type(v) # depends on [control=['if'], data=[]]
yield sha1(v.encode('utf-8')).hexdigest() # depends on [control=['for'], data=['v']] |
def get_cached(path, cache_name=None, **kwargs):
"""Request a resource form the API, first checking if there is a cached
response available. Returns the parsed JSON data.
"""
if gw2api.cache_dir and gw2api.cache_time and cache_name is not False:
if cache_name is None:
cache_name = path
cache_file = os.path.join(gw2api.cache_dir, cache_name)
if mtime(cache_file) >= time.time() - gw2api.cache_time:
with open(cache_file, "r") as fp:
return json.load(fp)
else:
cache_file = None
r = gw2api.session.get(gw2api.BASE_URL + path, **kwargs)
if not r.ok:
try:
response = r.json()
except ValueError: # pragma: no cover
response = None
if isinstance(response, dict) and "text" in response:
r.reason = response["text"]
r.raise_for_status()
data = r.json()
if cache_file:
with open(cache_file, "w") as fp:
json.dump(data, fp, indent=2)
return data | def function[get_cached, parameter[path, cache_name]]:
constant[Request a resource form the API, first checking if there is a cached
response available. Returns the parsed JSON data.
]
if <ast.BoolOp object at 0x7da2054a6170> begin[:]
if compare[name[cache_name] is constant[None]] begin[:]
variable[cache_name] assign[=] name[path]
variable[cache_file] assign[=] call[name[os].path.join, parameter[name[gw2api].cache_dir, name[cache_name]]]
if compare[call[name[mtime], parameter[name[cache_file]]] greater_or_equal[>=] binary_operation[call[name[time].time, parameter[]] - name[gw2api].cache_time]] begin[:]
with call[name[open], parameter[name[cache_file], constant[r]]] begin[:]
return[call[name[json].load, parameter[name[fp]]]]
variable[r] assign[=] call[name[gw2api].session.get, parameter[binary_operation[name[gw2api].BASE_URL + name[path]]]]
if <ast.UnaryOp object at 0x7da2054a7760> begin[:]
<ast.Try object at 0x7da2054a4850>
if <ast.BoolOp object at 0x7da2054a5e70> begin[:]
name[r].reason assign[=] call[name[response]][constant[text]]
call[name[r].raise_for_status, parameter[]]
variable[data] assign[=] call[name[r].json, parameter[]]
if name[cache_file] begin[:]
with call[name[open], parameter[name[cache_file], constant[w]]] begin[:]
call[name[json].dump, parameter[name[data], name[fp]]]
return[name[data]] | keyword[def] identifier[get_cached] ( identifier[path] , identifier[cache_name] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[gw2api] . identifier[cache_dir] keyword[and] identifier[gw2api] . identifier[cache_time] keyword[and] identifier[cache_name] keyword[is] keyword[not] keyword[False] :
keyword[if] identifier[cache_name] keyword[is] keyword[None] :
identifier[cache_name] = identifier[path]
identifier[cache_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[gw2api] . identifier[cache_dir] , identifier[cache_name] )
keyword[if] identifier[mtime] ( identifier[cache_file] )>= identifier[time] . identifier[time] ()- identifier[gw2api] . identifier[cache_time] :
keyword[with] identifier[open] ( identifier[cache_file] , literal[string] ) keyword[as] identifier[fp] :
keyword[return] identifier[json] . identifier[load] ( identifier[fp] )
keyword[else] :
identifier[cache_file] = keyword[None]
identifier[r] = identifier[gw2api] . identifier[session] . identifier[get] ( identifier[gw2api] . identifier[BASE_URL] + identifier[path] ,** identifier[kwargs] )
keyword[if] keyword[not] identifier[r] . identifier[ok] :
keyword[try] :
identifier[response] = identifier[r] . identifier[json] ()
keyword[except] identifier[ValueError] :
identifier[response] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[response] , identifier[dict] ) keyword[and] literal[string] keyword[in] identifier[response] :
identifier[r] . identifier[reason] = identifier[response] [ literal[string] ]
identifier[r] . identifier[raise_for_status] ()
identifier[data] = identifier[r] . identifier[json] ()
keyword[if] identifier[cache_file] :
keyword[with] identifier[open] ( identifier[cache_file] , literal[string] ) keyword[as] identifier[fp] :
identifier[json] . identifier[dump] ( identifier[data] , identifier[fp] , identifier[indent] = literal[int] )
keyword[return] identifier[data] | def get_cached(path, cache_name=None, **kwargs):
"""Request a resource form the API, first checking if there is a cached
response available. Returns the parsed JSON data.
"""
if gw2api.cache_dir and gw2api.cache_time and (cache_name is not False):
if cache_name is None:
cache_name = path # depends on [control=['if'], data=['cache_name']]
cache_file = os.path.join(gw2api.cache_dir, cache_name)
if mtime(cache_file) >= time.time() - gw2api.cache_time:
with open(cache_file, 'r') as fp:
return json.load(fp) # depends on [control=['with'], data=['fp']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
cache_file = None
r = gw2api.session.get(gw2api.BASE_URL + path, **kwargs)
if not r.ok:
try:
response = r.json() # depends on [control=['try'], data=[]]
except ValueError: # pragma: no cover
response = None # depends on [control=['except'], data=[]]
if isinstance(response, dict) and 'text' in response:
r.reason = response['text'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
r.raise_for_status()
data = r.json()
if cache_file:
with open(cache_file, 'w') as fp:
json.dump(data, fp, indent=2) # depends on [control=['with'], data=['fp']] # depends on [control=['if'], data=[]]
return data |
def path(self) -> str:
"""
Accessor for (stringified) path to current tails file.
:return: (stringified) path to current tails file.
"""
config = json.loads(self._tails_config_json)
return join(config['base_dir'], config['file']) | def function[path, parameter[self]]:
constant[
Accessor for (stringified) path to current tails file.
:return: (stringified) path to current tails file.
]
variable[config] assign[=] call[name[json].loads, parameter[name[self]._tails_config_json]]
return[call[name[join], parameter[call[name[config]][constant[base_dir]], call[name[config]][constant[file]]]]] | keyword[def] identifier[path] ( identifier[self] )-> identifier[str] :
literal[string]
identifier[config] = identifier[json] . identifier[loads] ( identifier[self] . identifier[_tails_config_json] )
keyword[return] identifier[join] ( identifier[config] [ literal[string] ], identifier[config] [ literal[string] ]) | def path(self) -> str:
"""
Accessor for (stringified) path to current tails file.
:return: (stringified) path to current tails file.
"""
config = json.loads(self._tails_config_json)
return join(config['base_dir'], config['file']) |
def _get_page_with_optional_heading(self, page_file_path: str) -> str or Dict:
'''Get the content of first heading of source Markdown file, if the file
contains any headings. Return a data element of ``pages`` section
of ``mkdocs.yml`` file.
:param page_file_path: path to source Markdown file
:returns: Unchanged file path or a dictionary: content of first heading, file path
'''
self.logger.debug(f'Looking for the first heading in {page_file_path}')
if page_file_path.endswith('.md'):
page_file_full_path = self.project_path / self.config['src_dir'] / page_file_path
with open(page_file_full_path, encoding='utf8') as page_file:
content = page_file.read()
headings_found = search(
r'^\s*#{1,6}[ \t]+([^\r\n]+?)(?:[ \t]+\{#\S+\})?\s*[\r\n]+',
content
)
if headings_found:
first_heading = headings_found.group(1)
self.logger.debug(f'Heading found: {first_heading}')
return {first_heading: page_file_path}
self.logger.debug(f'No heading found, returning original file path.')
return page_file_path | def function[_get_page_with_optional_heading, parameter[self, page_file_path]]:
constant[Get the content of first heading of source Markdown file, if the file
contains any headings. Return a data element of ``pages`` section
of ``mkdocs.yml`` file.
:param page_file_path: path to source Markdown file
:returns: Unchanged file path or a dictionary: content of first heading, file path
]
call[name[self].logger.debug, parameter[<ast.JoinedStr object at 0x7da1b1668e20>]]
if call[name[page_file_path].endswith, parameter[constant[.md]]] begin[:]
variable[page_file_full_path] assign[=] binary_operation[binary_operation[name[self].project_path / call[name[self].config][constant[src_dir]]] / name[page_file_path]]
with call[name[open], parameter[name[page_file_full_path]]] begin[:]
variable[content] assign[=] call[name[page_file].read, parameter[]]
variable[headings_found] assign[=] call[name[search], parameter[constant[^\s*#{1,6}[ \t]+([^\r\n]+?)(?:[ \t]+\{#\S+\})?\s*[\r\n]+], name[content]]]
if name[headings_found] begin[:]
variable[first_heading] assign[=] call[name[headings_found].group, parameter[constant[1]]]
call[name[self].logger.debug, parameter[<ast.JoinedStr object at 0x7da18f00e470>]]
return[dictionary[[<ast.Name object at 0x7da20e9b0760>], [<ast.Name object at 0x7da20e9b0970>]]]
call[name[self].logger.debug, parameter[<ast.JoinedStr object at 0x7da1b1669180>]]
return[name[page_file_path]] | keyword[def] identifier[_get_page_with_optional_heading] ( identifier[self] , identifier[page_file_path] : identifier[str] )-> identifier[str] keyword[or] identifier[Dict] :
literal[string]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[if] identifier[page_file_path] . identifier[endswith] ( literal[string] ):
identifier[page_file_full_path] = identifier[self] . identifier[project_path] / identifier[self] . identifier[config] [ literal[string] ]/ identifier[page_file_path]
keyword[with] identifier[open] ( identifier[page_file_full_path] , identifier[encoding] = literal[string] ) keyword[as] identifier[page_file] :
identifier[content] = identifier[page_file] . identifier[read] ()
identifier[headings_found] = identifier[search] (
literal[string] ,
identifier[content]
)
keyword[if] identifier[headings_found] :
identifier[first_heading] = identifier[headings_found] . identifier[group] ( literal[int] )
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] { identifier[first_heading] : identifier[page_file_path] }
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] identifier[page_file_path] | def _get_page_with_optional_heading(self, page_file_path: str) -> str or Dict:
"""Get the content of first heading of source Markdown file, if the file
contains any headings. Return a data element of ``pages`` section
of ``mkdocs.yml`` file.
:param page_file_path: path to source Markdown file
:returns: Unchanged file path or a dictionary: content of first heading, file path
"""
self.logger.debug(f'Looking for the first heading in {page_file_path}')
if page_file_path.endswith('.md'):
page_file_full_path = self.project_path / self.config['src_dir'] / page_file_path
with open(page_file_full_path, encoding='utf8') as page_file:
content = page_file.read()
headings_found = search('^\\s*#{1,6}[ \\t]+([^\\r\\n]+?)(?:[ \\t]+\\{#\\S+\\})?\\s*[\\r\\n]+', content)
if headings_found:
first_heading = headings_found.group(1)
self.logger.debug(f'Heading found: {first_heading}')
return {first_heading: page_file_path} # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['page_file']] # depends on [control=['if'], data=[]]
self.logger.debug(f'No heading found, returning original file path.')
return page_file_path |
def include_context(predicate, num, iterative):
    """
    Return elements in `iterative` including `num` before and after elements.

    >>> ''.join(include_context(lambda x: x == '!', 2, 'bb!aa__bb!aa'))
    'bb!aabb!aa'
    """
    # Three independent passes over the same input: one yields the elements,
    # the other two feed the forward- and backward-shifted predicate streams.
    elements, forward_source, backward_source = itertools.tee(iterative, 3)
    forward_flags = _forward_shifted_predicate(predicate, num, forward_source)
    backward_flags = _backward_shifted_predicate(predicate, num, backward_source)
    return (
        element
        for element, keep_fwd, keep_bwd
        in zip(elements, forward_flags, backward_flags)
        if keep_fwd or keep_bwd
    )
constant[
Return elements in `iterative` including `num` before and after elements.
>>> ''.join(include_context(lambda x: x == '!', 2, 'bb!aa__bb!aa'))
'bb!aabb!aa'
]
<ast.Tuple object at 0x7da1b0bb9990> assign[=] call[name[itertools].tee, parameter[name[iterative], constant[3]]]
variable[psf] assign[=] call[name[_forward_shifted_predicate], parameter[name[predicate], name[num], name[it1]]]
variable[psb] assign[=] call[name[_backward_shifted_predicate], parameter[name[predicate], name[num], name[it2]]]
return[<ast.GeneratorExp object at 0x7da1b0bb8850>] | keyword[def] identifier[include_context] ( identifier[predicate] , identifier[num] , identifier[iterative] ):
literal[string]
( identifier[it0] , identifier[it1] , identifier[it2] )= identifier[itertools] . identifier[tee] ( identifier[iterative] , literal[int] )
identifier[psf] = identifier[_forward_shifted_predicate] ( identifier[predicate] , identifier[num] , identifier[it1] )
identifier[psb] = identifier[_backward_shifted_predicate] ( identifier[predicate] , identifier[num] , identifier[it2] )
keyword[return] ( identifier[e] keyword[for] ( identifier[e] , identifier[pf] , identifier[pb] ) keyword[in] identifier[zip] ( identifier[it0] , identifier[psf] , identifier[psb] ) keyword[if] identifier[pf] keyword[or] identifier[pb] ) | def include_context(predicate, num, iterative):
"""
Return elements in `iterative` including `num` before and after elements.
>>> ''.join(include_context(lambda x: x == '!', 2, 'bb!aa__bb!aa'))
'bb!aabb!aa'
"""
(it0, it1, it2) = itertools.tee(iterative, 3)
psf = _forward_shifted_predicate(predicate, num, it1)
psb = _backward_shifted_predicate(predicate, num, it2)
return (e for (e, pf, pb) in zip(it0, psf, psb) if pf or pb) |
def spellcheck(word, wordlist, depth=2):
    """
    Given a word list and a depth parameter, return all words w' in the wordlist
    with LevenshteinDistance(word, w') <= depth

    :param word:
    :param wordlist:

    >>> Dic = ['pes', 'pesse', 'pease', 'peis', 'peisse', 'pise', 'peose', 'poese', 'poisen']
    >>> spellcheck('pece', Dic, depth = 2)
    ['pease', 'peis', 'peose', 'pes', 'pesse', 'pise', 'poese']
    >>> spellcheck('pece', Dic, depth = 3)
    ['pease', 'peis', 'peisse', 'peose', 'pes', 'pesse', 'pise', 'poese']
    """
    # Build a deterministic Levenshtein automaton for `word`, then intersect
    # it with a trie of the dictionary to enumerate every word within `depth`
    # edit operations.
    automaton = LevenshteinAutomaton(word, depth=depth).convert_to_deterministic()
    dictionary_trie = make_worlist_trie(wordlist)
    matches = walk_trie(dictionary_trie, '', automaton.s, automaton)
    return sorted(matches)
constant[
Given a word list and a depth parameter, return all words w' in the wordlist
with LevenshteinDistance(word, w') <= depth
:param word:
:param wordlist:
>>> Dic = ['pes', 'pesse', 'pease', 'peis', 'peisse', 'pise', 'peose', 'poese', 'poisen']
>>> spellcheck('pece', Dic, depth = 2)
['pease', 'peis', 'peose', 'pes', 'pesse', 'pise', 'poese']
>>> spellcheck('pece', Dic, depth = 3)
['pease', 'peis', 'peisse', 'peose', 'pes', 'pesse', 'pise', 'poese']
]
variable[Aut] assign[=] call[call[name[LevenshteinAutomaton], parameter[name[word]]].convert_to_deterministic, parameter[]]
variable[W] assign[=] call[name[make_worlist_trie], parameter[name[wordlist]]]
return[call[name[sorted], parameter[call[name[list], parameter[call[name[walk_trie], parameter[name[W], constant[], name[Aut].s, name[Aut]]]]]]]] | keyword[def] identifier[spellcheck] ( identifier[word] , identifier[wordlist] , identifier[depth] = literal[int] ):
literal[string]
identifier[Aut] = identifier[LevenshteinAutomaton] ( identifier[word] , identifier[depth] = identifier[depth] ). identifier[convert_to_deterministic] ()
identifier[W] = identifier[make_worlist_trie] ( identifier[wordlist] )
keyword[return] identifier[sorted] ( identifier[list] ( identifier[walk_trie] ( identifier[W] , literal[string] , identifier[Aut] . identifier[s] , identifier[Aut] ))) | def spellcheck(word, wordlist, depth=2):
"""
Given a word list and a depth parameter, return all words w' in the wordlist
with LevenshteinDistance(word, w') <= depth
:param word:
:param wordlist:
>>> Dic = ['pes', 'pesse', 'pease', 'peis', 'peisse', 'pise', 'peose', 'poese', 'poisen']
>>> spellcheck('pece', Dic, depth = 2)
['pease', 'peis', 'peose', 'pes', 'pesse', 'pise', 'poese']
>>> spellcheck('pece', Dic, depth = 3)
['pease', 'peis', 'peisse', 'peose', 'pes', 'pesse', 'pise', 'poese']
"""
Aut = LevenshteinAutomaton(word, depth=depth).convert_to_deterministic()
W = make_worlist_trie(wordlist)
return sorted(list(walk_trie(W, '', Aut.s, Aut))) |
def dot_product_mpnn_attention(q,
                               k,
                               v,
                               adjacency_matrix,
                               num_edge_types,
                               num_transforms=None,
                               use_weighted_sum=False,
                               name=None):
  """Dot product attention with edge vectors.

  Let B be the number of batches.
  Let N be the number of nodes in the graph.
  Let K be the size of the attention keys/queries.
  Let V be the size of the attention values.
  Let T be the total number of transforms (num_transforms).

  Args:
    q: The query Tensor of shape [B, N, K].
    k: The key Tensor of shape [B, T, N, K].
    v: The value Tensor of shape [B, T, N, V].
    adjacency_matrix: A Tensor of shape [B, N, N, T]. An entry at
      indices b, i, j, k is the indicator of the edge
      from node j to node i in batch b. A standard adjacency matrix will only
      have one edge type while a mutigraph will have multiple edge types.
    num_edge_types: An integer specifying number of edge types.
    num_transforms: An integer indicating number of transforms (T). If None,
      then num_transforms will be equal to num_edge_types.
    use_weighted_sum: If False, will only use a single transform per edge type.
      Otherwise, use a learned weighted sum of transforms per edge type.
    name: A string.

  Returns:
    A Tensor of shape [B, N, V] storing the result of computing attention
    weights using the queries and keys and combining the values according to
    those weights.

  Raises:
    ValueError: if num_transforms doesn't equal num_edge_types and not using
      weighted sum.
  """
  with tf.variable_scope(
      name,
      default_name="dot_product_mpnn_attention",
      values=[q, k, v, adjacency_matrix, num_edge_types]):
    # If not explicitly set, use num_transforms set to num_edge_types.
    num_transforms = (
        num_edge_types if num_transforms is None else num_transforms)

    if not use_weighted_sum and num_transforms != num_edge_types:
      raise ValueError("num_transforms must equal num_edge_types unless "
                       "use_weighted_sum is True")

    # Computes the raw dot-product attention values between each query and
    # the corresponding keys it needs to consider.
    #
    # This operation takes the dot product of (the query for
    # each node) and (the key for each node for each possible edge type),
    # creating an N x N matrix for each edge type. The entry at index (i, j)
    # is the dot-product for the edge from node i to node j of the appropriate
    # type. These dot products will eventually become attention weights
    # specifying how much node i weights an edge of that type coming from node
    # j.
    all_edge_logits = tf.matmul(
        tf.tile(tf.expand_dims(q, axis=1), [1, num_edge_types, 1, 1]),
        k,
        transpose_b=True)

    # The adjacency matrix assumes there is only one directed edge (i <- j) for
    # each pair of nodes. If such an edge exists, it contains the integer
    # type of that edge at position (i, j) of the adjacency matrix.
    #
    # Construct edge_vectors of shape [B, N, N, T].
    if use_weighted_sum:
      # Use dense representation for edge vectors.
      edge_vectors = make_edge_vectors(
          adjacency_matrix,
          num_edge_types,
          num_transforms)
    else:
      # Generate one-hot vectors based on edge types.
      # If there is an edge from node j to node i of type t, then index t of the
      # last dimension is 1 for entry (i, j) of the second and third dimensions.
      edge_vectors = tf.one_hot(adjacency_matrix, num_transforms)

    # Rearranging the dimensions to match the shape of all_edge_logits.
    edge_vectors = tf.transpose(edge_vectors, [0, 3, 1, 2])

    # Element-wise multiplies all_edge_logits and edge_vectors.
    #
    # In other words: all_edge_logits contains N x N matrices of query-key
    # products. This element-wise multiplication zeroes out entries that do not
    # correspond to actual edges in the graph of the appropriate edge type.
    # all_edge_logits retains shape [B, T, N, N].
    all_edge_logits *= edge_vectors

    # Since there can only be one edge from node A to node B, we can collapse
    # the T different adjacency matrices containing key-query pairs into one
    # adjacency matrix. logits is [B, N, N].
    # TODO(dbieber): Use a reshape instead of reduce sum to attend over all
    # edges instead of over all neighboring nodes to handle the multigraph case.
    logits = tf.reduce_sum(all_edge_logits, axis=1)

    # For pairs of nodes with no edges between them, add a large negative bias
    # to each location without an edge so that the softmax of entries with the
    # value 0 become a small negative number instead.
    # (Removed a dead `bias = 0` initializer here: its value was
    # unconditionally overwritten by the assignment below.)
    bias = tf.to_float(tf.equal(
        tf.reduce_sum(adjacency_matrix, axis=-1), 0)) * -1e9
    logits += bias

    # Turn the raw key-query products into a probability distribution (or,
    # in terms of attention, weights). The softmax is computed across the
    # last dimension of logits.
    compatibility = tf.nn.softmax(logits)  # Shape [B, N, N].

    # Computes a summary showing the attention matrix as an image. Does not do
    # any work toward actually performing attention.
    common_attention.attention_image_summary(
        tf.expand_dims(compatibility, axis=1), None)

    # Repeats the attention matrix T times for each batch, producing
    # a tensor with shape [B, T, N, N] where the [N, N] component is T
    # repeats of the values found in compatibility.
    edge_compatibility = tf.tile(
        tf.expand_dims(compatibility, axis=1), [1, num_edge_types, 1, 1])

    # Zeroes out the entries in edge_compatibility that do not correspond to
    # actual edges.
    edge_compatibility *= edge_vectors  # Shape [B, T, N, N].

    output = compute_values(edge_compatibility, v)
    return output
constant[Dot product attention with edge vectors.
Let B be the number of batches.
Let N be the number of nodes in the graph.
Let K be the size of the attention keys/queries.
Let V be the size of the attention values.
Let T be the total number of transforms (num_transforms).
Args:
q: The query Tensor of shape [B, N, K].
k: The key Tensor of shape [B, T, N, K].
v: The value Tensor of shape [B, T, N, V].
adjacency_matrix: A Tensor of shape [B, N, N, T]. An entry at
indices b, i, j, k is the indicator of the edge
from node j to node i in batch b. A standard adjacency matrix will only
have one edge type while a mutigraph will have multiple edge types.
num_edge_types: An integer specifying number of edge types.
num_transforms: An integer indicating number of transforms (T). If None,
then num_transforms will be equal to num_edge_types.
use_weighted_sum: If False, will only use a single transform per edge type.
Otherwise, use a learned weighted sum of transforms per edge type.
name: A string.
Returns:
A Tensor of shape [B, N, V] storing the result of computing attention
weights using the queries and keys and combining the values according to
those weights.
Raises:
ValueError: if num_transforms doesn't equal num_edge_types and not using
weighted sum.
]
with call[name[tf].variable_scope, parameter[name[name]]] begin[:]
variable[num_transforms] assign[=] <ast.IfExp object at 0x7da1b1e16e60>
if <ast.BoolOp object at 0x7da1b1e16620> begin[:]
<ast.Raise object at 0x7da1b1e15b10>
variable[all_edge_logits] assign[=] call[name[tf].matmul, parameter[call[name[tf].tile, parameter[call[name[tf].expand_dims, parameter[name[q]]], list[[<ast.Constant object at 0x7da1b1e16a70>, <ast.Name object at 0x7da1b1e16110>, <ast.Constant object at 0x7da1b1e15960>, <ast.Constant object at 0x7da1b1e164a0>]]]], name[k]]]
if name[use_weighted_sum] begin[:]
variable[edge_vectors] assign[=] call[name[make_edge_vectors], parameter[name[adjacency_matrix], name[num_edge_types], name[num_transforms]]]
variable[edge_vectors] assign[=] call[name[tf].transpose, parameter[name[edge_vectors], list[[<ast.Constant object at 0x7da1b1e14220>, <ast.Constant object at 0x7da1b1e15120>, <ast.Constant object at 0x7da1b1e15900>, <ast.Constant object at 0x7da1b1e170d0>]]]]
<ast.AugAssign object at 0x7da1b1e14f40>
variable[logits] assign[=] call[name[tf].reduce_sum, parameter[name[all_edge_logits]]]
variable[bias] assign[=] constant[0]
variable[bias] assign[=] binary_operation[call[name[tf].to_float, parameter[call[name[tf].equal, parameter[call[name[tf].reduce_sum, parameter[name[adjacency_matrix]]], constant[0]]]]] * <ast.UnaryOp object at 0x7da1b1e145b0>]
<ast.AugAssign object at 0x7da1b1e177f0>
variable[compatibility] assign[=] call[name[tf].nn.softmax, parameter[name[logits]]]
call[name[common_attention].attention_image_summary, parameter[call[name[tf].expand_dims, parameter[name[compatibility]]], constant[None]]]
variable[edge_compatibility] assign[=] call[name[tf].tile, parameter[call[name[tf].expand_dims, parameter[name[compatibility]]], list[[<ast.Constant object at 0x7da1b1e14cd0>, <ast.Name object at 0x7da1b1e141c0>, <ast.Constant object at 0x7da1b1e166e0>, <ast.Constant object at 0x7da1b1e17af0>]]]]
<ast.AugAssign object at 0x7da1b1e14280>
variable[output] assign[=] call[name[compute_values], parameter[name[edge_compatibility], name[v]]]
return[name[output]] | keyword[def] identifier[dot_product_mpnn_attention] ( identifier[q] ,
identifier[k] ,
identifier[v] ,
identifier[adjacency_matrix] ,
identifier[num_edge_types] ,
identifier[num_transforms] = keyword[None] ,
identifier[use_weighted_sum] = keyword[False] ,
identifier[name] = keyword[None] ):
literal[string]
keyword[with] identifier[tf] . identifier[variable_scope] (
identifier[name] ,
identifier[default_name] = literal[string] ,
identifier[values] =[ identifier[q] , identifier[k] , identifier[v] , identifier[adjacency_matrix] , identifier[num_edge_types] ]):
identifier[num_transforms] =(
identifier[num_edge_types] keyword[if] identifier[num_transforms] keyword[is] keyword[None] keyword[else] identifier[num_transforms] )
keyword[if] keyword[not] identifier[use_weighted_sum] keyword[and] identifier[num_transforms] != identifier[num_edge_types] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[all_edge_logits] = identifier[tf] . identifier[matmul] (
identifier[tf] . identifier[tile] ( identifier[tf] . identifier[expand_dims] ( identifier[q] , identifier[axis] = literal[int] ),[ literal[int] , identifier[num_edge_types] , literal[int] , literal[int] ]),
identifier[k] ,
identifier[transpose_b] = keyword[True] )
keyword[if] identifier[use_weighted_sum] :
identifier[edge_vectors] = identifier[make_edge_vectors] (
identifier[adjacency_matrix] ,
identifier[num_edge_types] ,
identifier[num_transforms] )
keyword[else] :
identifier[edge_vectors] = identifier[tf] . identifier[one_hot] ( identifier[adjacency_matrix] , identifier[num_transforms] )
identifier[edge_vectors] = identifier[tf] . identifier[transpose] ( identifier[edge_vectors] ,[ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[all_edge_logits] *= identifier[edge_vectors]
identifier[logits] = identifier[tf] . identifier[reduce_sum] ( identifier[all_edge_logits] , identifier[axis] = literal[int] )
identifier[bias] = literal[int]
identifier[bias] = identifier[tf] . identifier[to_float] ( identifier[tf] . identifier[equal] (
identifier[tf] . identifier[reduce_sum] ( identifier[adjacency_matrix] , identifier[axis] =- literal[int] ), literal[int] ))*- literal[int]
identifier[logits] += identifier[bias]
identifier[compatibility] = identifier[tf] . identifier[nn] . identifier[softmax] ( identifier[logits] )
identifier[common_attention] . identifier[attention_image_summary] (
identifier[tf] . identifier[expand_dims] ( identifier[compatibility] , identifier[axis] = literal[int] ), keyword[None] )
identifier[edge_compatibility] = identifier[tf] . identifier[tile] (
identifier[tf] . identifier[expand_dims] ( identifier[compatibility] , identifier[axis] = literal[int] ),[ literal[int] , identifier[num_edge_types] , literal[int] , literal[int] ])
identifier[edge_compatibility] *= identifier[edge_vectors]
identifier[output] = identifier[compute_values] ( identifier[edge_compatibility] , identifier[v] )
keyword[return] identifier[output] | def dot_product_mpnn_attention(q, k, v, adjacency_matrix, num_edge_types, num_transforms=None, use_weighted_sum=False, name=None):
"""Dot product attention with edge vectors.
Let B be the number of batches.
Let N be the number of nodes in the graph.
Let K be the size of the attention keys/queries.
Let V be the size of the attention values.
Let T be the total number of transforms (num_transforms).
Args:
q: The query Tensor of shape [B, N, K].
k: The key Tensor of shape [B, T, N, K].
v: The value Tensor of shape [B, T, N, V].
adjacency_matrix: A Tensor of shape [B, N, N, T]. An entry at
indices b, i, j, k is the indicator of the edge
from node j to node i in batch b. A standard adjacency matrix will only
have one edge type while a mutigraph will have multiple edge types.
num_edge_types: An integer specifying number of edge types.
num_transforms: An integer indicating number of transforms (T). If None,
then num_transforms will be equal to num_edge_types.
use_weighted_sum: If False, will only use a single transform per edge type.
Otherwise, use a learned weighted sum of transforms per edge type.
name: A string.
Returns:
A Tensor of shape [B, N, V] storing the result of computing attention
weights using the queries and keys and combining the values according to
those weights.
Raises:
ValueError: if num_transforms doesn't equal num_edge_types and not using
weighted sum.
"""
with tf.variable_scope(name, default_name='dot_product_mpnn_attention', values=[q, k, v, adjacency_matrix, num_edge_types]):
# If not explicitly set, use num_transforms set to num_edge_types.
num_transforms = num_edge_types if num_transforms is None else num_transforms
if not use_weighted_sum and num_transforms != num_edge_types:
raise ValueError('num_transforms must equal num_edge_types unless use_weighted_sum is True') # depends on [control=['if'], data=[]]
# Computes the raw dot-product attention values between each query and
# the corresponding keys it needs to consider.
#
# This operation takes the dot product of (the query for
# each node) and (the key for each node for each possible edge type),
# creating an N x N matrix for each edge type. The entry at index (i, j)
# is the dot-product for the edge from node i to node j of the appropriate
# type. These dot products will eventually become attention weights
# specifying how much node i weights an edge of that type coming from node
# j.
all_edge_logits = tf.matmul(tf.tile(tf.expand_dims(q, axis=1), [1, num_edge_types, 1, 1]), k, transpose_b=True)
# The adjacency matrix assumes there is only one directed edge (i <- j) for
# each pair of nodes. If such an edge exists, it contains the integer
# type of that edge at position (i, j) of the adjacency matrix.
#
# Construct edge_vectors of shape [B, N, N, T].
if use_weighted_sum:
# Use dense representation for edge vectors.
edge_vectors = make_edge_vectors(adjacency_matrix, num_edge_types, num_transforms) # depends on [control=['if'], data=[]]
else:
# Generate one-hot vectors based on edge types.
# If there is an edge from node j to node i of type t, then index t of the
# last dimension is 1 for entry (i, j) of the second and third dimensions.
edge_vectors = tf.one_hot(adjacency_matrix, num_transforms)
# Rearranging the dimensions to match the shape of all_edge_logits.
edge_vectors = tf.transpose(edge_vectors, [0, 3, 1, 2])
# Element-wise multiplies all_edge_logits and edge_vectors.
#
# In other words: all_edge_logits contains N x N matrices of query-key
# products. This element-wise multiplication zeroes out entries that do not
# correspond to actual edges in the graph of the appropriate edge type.
# all_edge_logits retains shape [B, T, N, N].
all_edge_logits *= edge_vectors
# Since there can only be one edge from node A to node B, we can collapse
# the T different adjacency matrices containing key-query pairs into one
# adjacency matrix. logits is [B, N, N].
# TODO(dbieber): Use a reshape instead of reduce sum to attend over all
# edges instead of over all neighboring nodes to handle the multigraph case.
logits = tf.reduce_sum(all_edge_logits, axis=1)
# For pairs of nodes with no edges between them, add a large negative bias
# to each location without an edge so that the softmax of entries with the
# value 0 become a small negative number instead.
bias = 0
bias = tf.to_float(tf.equal(tf.reduce_sum(adjacency_matrix, axis=-1), 0)) * -1000000000.0
logits += bias
# Turn the raw key-query products into a probability distribution (or,
# in terms of attention, weights). The softmax is computed across the
# last dimension of logits.
compatibility = tf.nn.softmax(logits) # Shape [B, N, N].
# Computes a summary showing the attention matrix as an image. Does not do
# any work toward actually performing attention.
common_attention.attention_image_summary(tf.expand_dims(compatibility, axis=1), None)
# Repeats the attention matrix T times for each batch, producing
# a tensor with shape [B, T, N, N] where the [N, N] component is T
# repeats of the values found in compatibility.
edge_compatibility = tf.tile(tf.expand_dims(compatibility, axis=1), [1, num_edge_types, 1, 1])
# Zeroes out the entries in edge_compatibility that do not correspond to
# actual edges.
edge_compatibility *= edge_vectors # Shape [B, T, N, N].
output = compute_values(edge_compatibility, v)
return output # depends on [control=['with'], data=[]] |
def _get_cluster_sizes(self):
        """
        Returns the marker size (in points, e.g. area of the circle) based on
        the scores, using the prop_to_size scaling mechanism.
        """
        # NOTE: log and power are hardcoded, should we allow the user to specify?
        marker_sizes = prop_to_size(
            self.scores_,
            mi=self.min_size,
            ma=self.max_size,
            log=False,
            power=0.5,
        )
        return marker_sizes
constant[
Returns the marker size (in points, e.g. area of the circle) based on
the scores, using the prop_to_size scaling mechanism.
]
return[call[name[prop_to_size], parameter[name[self].scores_]]] | keyword[def] identifier[_get_cluster_sizes] ( identifier[self] ):
literal[string]
keyword[return] identifier[prop_to_size] (
identifier[self] . identifier[scores_] , identifier[mi] = identifier[self] . identifier[min_size] , identifier[ma] = identifier[self] . identifier[max_size] , identifier[log] = keyword[False] , identifier[power] = literal[int]
) | def _get_cluster_sizes(self):
"""
Returns the marker size (in points, e.g. area of the circle) based on
the scores, using the prop_to_size scaling mechanism.
"""
# NOTE: log and power are hardcoded, should we allow the user to specify?
return prop_to_size(self.scores_, mi=self.min_size, ma=self.max_size, log=False, power=0.5) |
def most_read_creators_card(num=10):
    """
    Displays a card showing the Creators who have the most Readings
    associated with their Publications.

    In spectator_core tags, rather than spectator_reading so it can still be
    used on core pages, even if spectator_reading isn't installed.
    """
    if not spectator_apps.is_enabled('reading'):
        # Reading app not enabled: return None, same as falling off the end.
        return None
    creators = most_read_creators(num=num)
    creators = chartify(creators, 'num_readings', cutoff=1)
    return {
        'card_title': 'Most read authors',
        'score_attr': 'num_readings',
        'object_list': creators,
    }
constant[
Displays a card showing the Creators who have the most Readings
associated with their Publications.
In spectator_core tags, rather than spectator_reading so it can still be
used on core pages, even if spectator_reading isn't installed.
]
if call[name[spectator_apps].is_enabled, parameter[constant[reading]]] begin[:]
variable[object_list] assign[=] call[name[most_read_creators], parameter[]]
variable[object_list] assign[=] call[name[chartify], parameter[name[object_list], constant[num_readings]]]
return[dictionary[[<ast.Constant object at 0x7da18bc72f80>, <ast.Constant object at 0x7da18bc733d0>, <ast.Constant object at 0x7da18bc73370>], [<ast.Constant object at 0x7da18bc73820>, <ast.Constant object at 0x7da18bc72ce0>, <ast.Name object at 0x7da18bc71cc0>]]] | keyword[def] identifier[most_read_creators_card] ( identifier[num] = literal[int] ):
literal[string]
keyword[if] identifier[spectator_apps] . identifier[is_enabled] ( literal[string] ):
identifier[object_list] = identifier[most_read_creators] ( identifier[num] = identifier[num] )
identifier[object_list] = identifier[chartify] ( identifier[object_list] , literal[string] , identifier[cutoff] = literal[int] )
keyword[return] {
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[object_list] ,
} | def most_read_creators_card(num=10):
"""
Displays a card showing the Creators who have the most Readings
associated with their Publications.
In spectator_core tags, rather than spectator_reading so it can still be
used on core pages, even if spectator_reading isn't installed.
"""
if spectator_apps.is_enabled('reading'):
object_list = most_read_creators(num=num)
object_list = chartify(object_list, 'num_readings', cutoff=1)
return {'card_title': 'Most read authors', 'score_attr': 'num_readings', 'object_list': object_list} # depends on [control=['if'], data=[]] |
def get_argument_starttime(self):
        """
        Helper function to get starttime argument.

        Raises exception if argument is missing.
        Returns the starttime argument.
        """
        try:
            return self.get_argument(constants.PARAM_STARTTIME)
        except tornado.web.MissingArgumentError as err:
            # Re-raise as a plain Exception carrying tornado's log message.
            raise Exception(err.log_message)
constant[
Helper function to get starttime argument.
Raises exception if argument is missing.
Returns the starttime argument.
]
<ast.Try object at 0x7da2054a4dc0> | keyword[def] identifier[get_argument_starttime] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[starttime] = identifier[self] . identifier[get_argument] ( identifier[constants] . identifier[PARAM_STARTTIME] )
keyword[return] identifier[starttime]
keyword[except] identifier[tornado] . identifier[web] . identifier[MissingArgumentError] keyword[as] identifier[e] :
keyword[raise] identifier[Exception] ( identifier[e] . identifier[log_message] ) | def get_argument_starttime(self):
"""
Helper function to get starttime argument.
Raises exception if argument is missing.
Returns the starttime argument.
"""
try:
starttime = self.get_argument(constants.PARAM_STARTTIME)
return starttime # depends on [control=['try'], data=[]]
except tornado.web.MissingArgumentError as e:
raise Exception(e.log_message) # depends on [control=['except'], data=['e']] |
def port_profile_qos_profile_qos_flowcontrol_pfc_pfc_cos(self, **kwargs):
    """Build the port-profile/qos-profile/qos/flowcontrol/pfc/pfc-cos
    config tree and pass it to the callback (auto-generated NETCONF helper).

    Required kwargs: ``name`` (port-profile key), ``pfc_cos`` (leaf value).
    Optional kwarg: ``callback`` — defaults to ``self._callback``.
    """
    config = ET.Element("config")
    port_profile = ET.SubElement(
        config, "port-profile",
        xmlns="urn:brocade.com:mgmt:brocade-port-profile")
    # The profile name is the list key for this config subtree.
    ET.SubElement(port_profile, "name").text = kwargs.pop('name')
    qos_profile = ET.SubElement(port_profile, "qos-profile")
    qos = ET.SubElement(qos_profile, "qos")
    flowcontrol = ET.SubElement(qos, "flowcontrol")
    pfc = ET.SubElement(flowcontrol, "pfc")
    ET.SubElement(pfc, "pfc-cos").text = kwargs.pop('pfc_cos')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[port_profile] assign[=] call[name[ET].SubElement, parameter[name[config], constant[port-profile]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[port_profile], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[qos_profile] assign[=] call[name[ET].SubElement, parameter[name[port_profile], constant[qos-profile]]]
variable[qos] assign[=] call[name[ET].SubElement, parameter[name[qos_profile], constant[qos]]]
variable[flowcontrol] assign[=] call[name[ET].SubElement, parameter[name[qos], constant[flowcontrol]]]
variable[pfc] assign[=] call[name[ET].SubElement, parameter[name[flowcontrol], constant[pfc]]]
variable[pfc_cos] assign[=] call[name[ET].SubElement, parameter[name[pfc], constant[pfc-cos]]]
name[pfc_cos].text assign[=] call[name[kwargs].pop, parameter[constant[pfc_cos]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[port_profile_qos_profile_qos_flowcontrol_pfc_pfc_cos] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[port_profile] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[port_profile] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[qos_profile] = identifier[ET] . identifier[SubElement] ( identifier[port_profile] , literal[string] )
identifier[qos] = identifier[ET] . identifier[SubElement] ( identifier[qos_profile] , literal[string] )
identifier[flowcontrol] = identifier[ET] . identifier[SubElement] ( identifier[qos] , literal[string] )
identifier[pfc] = identifier[ET] . identifier[SubElement] ( identifier[flowcontrol] , literal[string] )
identifier[pfc_cos] = identifier[ET] . identifier[SubElement] ( identifier[pfc] , literal[string] )
identifier[pfc_cos] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def port_profile_qos_profile_qos_flowcontrol_pfc_pfc_cos(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
port_profile = ET.SubElement(config, 'port-profile', xmlns='urn:brocade.com:mgmt:brocade-port-profile')
name_key = ET.SubElement(port_profile, 'name')
name_key.text = kwargs.pop('name')
qos_profile = ET.SubElement(port_profile, 'qos-profile')
qos = ET.SubElement(qos_profile, 'qos')
flowcontrol = ET.SubElement(qos, 'flowcontrol')
pfc = ET.SubElement(flowcontrol, 'pfc')
pfc_cos = ET.SubElement(pfc, 'pfc-cos')
pfc_cos.text = kwargs.pop('pfc_cos')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def create(backbone: ModelFactory, input_block: typing.Optional[ModelFactory] = None):
    """Vel factory function.

    Builds a ``PolicyGradientRnnModelFactory`` from the given backbone;
    when no input block is supplied, an identity block is used.
    """
    chosen_input = IdentityFactory() if input_block is None else input_block
    return PolicyGradientRnnModelFactory(input_block=chosen_input, backbone=backbone)
constant[ Vel factory function ]
if compare[name[input_block] is constant[None]] begin[:]
variable[input_block] assign[=] call[name[IdentityFactory], parameter[]]
return[call[name[PolicyGradientRnnModelFactory], parameter[]]] | keyword[def] identifier[create] ( identifier[backbone] : identifier[ModelFactory] , identifier[input_block] : identifier[typing] . identifier[Optional] [ identifier[ModelFactory] ]= keyword[None] ):
literal[string]
keyword[if] identifier[input_block] keyword[is] keyword[None] :
identifier[input_block] = identifier[IdentityFactory] ()
keyword[return] identifier[PolicyGradientRnnModelFactory] (
identifier[input_block] = identifier[input_block] ,
identifier[backbone] = identifier[backbone]
) | def create(backbone: ModelFactory, input_block: typing.Optional[ModelFactory]=None):
""" Vel factory function """
if input_block is None:
input_block = IdentityFactory() # depends on [control=['if'], data=['input_block']]
return PolicyGradientRnnModelFactory(input_block=input_block, backbone=backbone) |
def make_join_request(self, password = None, history_maxchars = None,
                      history_maxstanzas = None, history_seconds = None,
                      history_since = None):
    """
    Turn this presence stanza into a MUC room join request.

    Replaces any existing MUC child element with a fresh one, attaches
    history-retrieval limits when any are given, and attaches the room
    password when given.
    :Parameters:
        - `password`: password to the room.
        - `history_maxchars`: limit of the total number of characters in
          history.
        - `history_maxstanzas`: limit of the total number of messages in
          history.
        - `history_seconds`: send only messages received in the last
          `seconds` seconds.
        - `history_since`: send only the messages received since the
          dateTime specified (UTC).
    :Types:
        - `password`: `unicode`
        - `history_maxchars`: `int`
        - `history_maxstanzas`: `int`
        - `history_seconds`: `int`
        - `history_since`: `datetime.datetime`
    """
    self.clear_muc_child()
    self.muc_child = MucX(parent=self.xmlnode)
    # Only build a history element when at least one limit was supplied.
    history_limits = (history_maxchars, history_maxstanzas,
                      history_seconds, history_since)
    if any(limit is not None for limit in history_limits):
        self.muc_child.set_history(HistoryParameters(*history_limits))
    if password is not None:
        self.muc_child.set_password(password)
constant[
Make the presence stanza a MUC room join request.
:Parameters:
- `password`: password to the room.
- `history_maxchars`: limit of the total number of characters in
history.
- `history_maxstanzas`: limit of the total number of messages in
history.
- `history_seconds`: send only messages received in the last
`seconds` seconds.
- `history_since`: Send only the messages received since the
dateTime specified (UTC).
:Types:
- `password`: `unicode`
- `history_maxchars`: `int`
- `history_maxstanzas`: `int`
- `history_seconds`: `int`
- `history_since`: `datetime.datetime`
]
call[name[self].clear_muc_child, parameter[]]
name[self].muc_child assign[=] call[name[MucX], parameter[]]
if <ast.BoolOp object at 0x7da18f721510> begin[:]
variable[history] assign[=] call[name[HistoryParameters], parameter[name[history_maxchars], name[history_maxstanzas], name[history_seconds], name[history_since]]]
call[name[self].muc_child.set_history, parameter[name[history]]]
if compare[name[password] is_not constant[None]] begin[:]
call[name[self].muc_child.set_password, parameter[name[password]]] | keyword[def] identifier[make_join_request] ( identifier[self] , identifier[password] = keyword[None] , identifier[history_maxchars] = keyword[None] ,
identifier[history_maxstanzas] = keyword[None] , identifier[history_seconds] = keyword[None] ,
identifier[history_since] = keyword[None] ):
literal[string]
identifier[self] . identifier[clear_muc_child] ()
identifier[self] . identifier[muc_child] = identifier[MucX] ( identifier[parent] = identifier[self] . identifier[xmlnode] )
keyword[if] ( identifier[history_maxchars] keyword[is] keyword[not] keyword[None] keyword[or] identifier[history_maxstanzas] keyword[is] keyword[not] keyword[None]
keyword[or] identifier[history_seconds] keyword[is] keyword[not] keyword[None] keyword[or] identifier[history_since] keyword[is] keyword[not] keyword[None] ):
identifier[history] = identifier[HistoryParameters] ( identifier[history_maxchars] , identifier[history_maxstanzas] ,
identifier[history_seconds] , identifier[history_since] )
identifier[self] . identifier[muc_child] . identifier[set_history] ( identifier[history] )
keyword[if] identifier[password] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[muc_child] . identifier[set_password] ( identifier[password] ) | def make_join_request(self, password=None, history_maxchars=None, history_maxstanzas=None, history_seconds=None, history_since=None):
"""
Make the presence stanza a MUC room join request.
:Parameters:
- `password`: password to the room.
- `history_maxchars`: limit of the total number of characters in
history.
- `history_maxstanzas`: limit of the total number of messages in
history.
- `history_seconds`: send only messages received in the last
`seconds` seconds.
- `history_since`: Send only the messages received since the
dateTime specified (UTC).
:Types:
- `password`: `unicode`
- `history_maxchars`: `int`
- `history_maxstanzas`: `int`
- `history_seconds`: `int`
- `history_since`: `datetime.datetime`
"""
self.clear_muc_child()
self.muc_child = MucX(parent=self.xmlnode)
if history_maxchars is not None or history_maxstanzas is not None or history_seconds is not None or (history_since is not None):
history = HistoryParameters(history_maxchars, history_maxstanzas, history_seconds, history_since)
self.muc_child.set_history(history) # depends on [control=['if'], data=[]]
if password is not None:
self.muc_child.set_password(password) # depends on [control=['if'], data=['password']] |
def GetTemplateArgs(clean_lines, linenum):
  """Find list of template arguments associated with this function declaration.

  Args:
    clean_lines: A CleansedLines instance containing the file.
    linenum: Line number containing the start of the function declaration,
             usually one line after the end of the template-argument-list.

  Returns:
    Set of type names, or empty set if this does not appear to have
    any template parameters.
  """
  # Find start of function: walk upward from linenum until a line with
  # '(' (the parameter list) is found.  Hitting a blank line first means
  # we ran past the declaration, so give up.
  func_line = linenum
  while func_line > 0:
    line = clean_lines.elided[func_line]
    if Match(r'^\s*$', line):
      return set()
    if line.find('(') >= 0:
      break
    func_line -= 1
  if func_line == 0:
    return set()
  # Collapse template-argument-list into a single string
  argument_list = ''
  match = Match(r'^(\s*template\s*)<', clean_lines.elided[func_line])
  if match:
    # template-argument-list on the same line as function name
    start_col = len(match.group(1))
    _, end_line, end_col = CloseExpression(clean_lines, func_line, start_col)
    if end_col > -1 and end_line == func_line:
      start_col += 1  # Skip the opening bracket
      argument_list = clean_lines.elided[func_line][start_col:end_col]
  elif func_line > 1:
    # template-argument-list one line before function name
    match = Match(r'^(.*)>\s*$', clean_lines.elided[func_line - 1])
    if match:
      end_col = len(match.group(1))
      # Walk back to the matching '<' so we know where the list starts.
      _, start_line, start_col = ReverseCloseExpression(
          clean_lines, func_line - 1, end_col)
      if start_col > -1:
        start_col += 1  # Skip the opening bracket
        # Concatenate the (possibly multi-line) argument list, trimming
        # everything before '<' on the first line and after '>' on the last.
        while start_line < func_line - 1:
          argument_list += clean_lines.elided[start_line][start_col:]
          start_col = 0
          start_line += 1
        argument_list += clean_lines.elided[func_line - 1][start_col:end_col]
  if not argument_list:
    return set()
  # Extract type names: repeatedly peel a leading 'typename T' / 'class T'
  # (optionally variadic '...') off the front of the collapsed list.
  # Non-type parameters (e.g. 'int N') are intentionally not captured.
  typenames = set()
  while True:
    match = Match(r'^[,\s]*(?:typename|class)(?:\.\.\.)?\s+(\w+)(.*)$',
                  argument_list)
    if not match:
      break
    typenames.add(match.group(1))
    argument_list = match.group(2)
  return typenames
constant[Find list of template arguments associated with this function declaration.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: Line number containing the start of the function declaration,
usually one line after the end of the template-argument-list.
Returns:
Set of type names, or empty set if this does not appear to have
any template parameters.
]
variable[func_line] assign[=] name[linenum]
while compare[name[func_line] greater[>] constant[0]] begin[:]
variable[line] assign[=] call[name[clean_lines].elided][name[func_line]]
if call[name[Match], parameter[constant[^\s*$], name[line]]] begin[:]
return[call[name[set], parameter[]]]
if compare[call[name[line].find, parameter[constant[(]]] greater_or_equal[>=] constant[0]] begin[:]
break
<ast.AugAssign object at 0x7da1b1db67d0>
if compare[name[func_line] equal[==] constant[0]] begin[:]
return[call[name[set], parameter[]]]
variable[argument_list] assign[=] constant[]
variable[match] assign[=] call[name[Match], parameter[constant[^(\s*template\s*)<], call[name[clean_lines].elided][name[func_line]]]]
if name[match] begin[:]
variable[start_col] assign[=] call[name[len], parameter[call[name[match].group, parameter[constant[1]]]]]
<ast.Tuple object at 0x7da20c991120> assign[=] call[name[CloseExpression], parameter[name[clean_lines], name[func_line], name[start_col]]]
if <ast.BoolOp object at 0x7da20c993d60> begin[:]
<ast.AugAssign object at 0x7da20c993520>
variable[argument_list] assign[=] call[call[name[clean_lines].elided][name[func_line]]][<ast.Slice object at 0x7da20c992fb0>]
if <ast.UnaryOp object at 0x7da20c992dd0> begin[:]
return[call[name[set], parameter[]]]
variable[typenames] assign[=] call[name[set], parameter[]]
while constant[True] begin[:]
variable[match] assign[=] call[name[Match], parameter[constant[^[,\s]*(?:typename|class)(?:\.\.\.)?\s+(\w+)(.*)$], name[argument_list]]]
if <ast.UnaryOp object at 0x7da20c993430> begin[:]
break
call[name[typenames].add, parameter[call[name[match].group, parameter[constant[1]]]]]
variable[argument_list] assign[=] call[name[match].group, parameter[constant[2]]]
return[name[typenames]] | keyword[def] identifier[GetTemplateArgs] ( identifier[clean_lines] , identifier[linenum] ):
literal[string]
identifier[func_line] = identifier[linenum]
keyword[while] identifier[func_line] > literal[int] :
identifier[line] = identifier[clean_lines] . identifier[elided] [ identifier[func_line] ]
keyword[if] identifier[Match] ( literal[string] , identifier[line] ):
keyword[return] identifier[set] ()
keyword[if] identifier[line] . identifier[find] ( literal[string] )>= literal[int] :
keyword[break]
identifier[func_line] -= literal[int]
keyword[if] identifier[func_line] == literal[int] :
keyword[return] identifier[set] ()
identifier[argument_list] = literal[string]
identifier[match] = identifier[Match] ( literal[string] , identifier[clean_lines] . identifier[elided] [ identifier[func_line] ])
keyword[if] identifier[match] :
identifier[start_col] = identifier[len] ( identifier[match] . identifier[group] ( literal[int] ))
identifier[_] , identifier[end_line] , identifier[end_col] = identifier[CloseExpression] ( identifier[clean_lines] , identifier[func_line] , identifier[start_col] )
keyword[if] identifier[end_col] >- literal[int] keyword[and] identifier[end_line] == identifier[func_line] :
identifier[start_col] += literal[int]
identifier[argument_list] = identifier[clean_lines] . identifier[elided] [ identifier[func_line] ][ identifier[start_col] : identifier[end_col] ]
keyword[elif] identifier[func_line] > literal[int] :
identifier[match] = identifier[Match] ( literal[string] , identifier[clean_lines] . identifier[elided] [ identifier[func_line] - literal[int] ])
keyword[if] identifier[match] :
identifier[end_col] = identifier[len] ( identifier[match] . identifier[group] ( literal[int] ))
identifier[_] , identifier[start_line] , identifier[start_col] = identifier[ReverseCloseExpression] (
identifier[clean_lines] , identifier[func_line] - literal[int] , identifier[end_col] )
keyword[if] identifier[start_col] >- literal[int] :
identifier[start_col] += literal[int]
keyword[while] identifier[start_line] < identifier[func_line] - literal[int] :
identifier[argument_list] += identifier[clean_lines] . identifier[elided] [ identifier[start_line] ][ identifier[start_col] :]
identifier[start_col] = literal[int]
identifier[start_line] += literal[int]
identifier[argument_list] += identifier[clean_lines] . identifier[elided] [ identifier[func_line] - literal[int] ][ identifier[start_col] : identifier[end_col] ]
keyword[if] keyword[not] identifier[argument_list] :
keyword[return] identifier[set] ()
identifier[typenames] = identifier[set] ()
keyword[while] keyword[True] :
identifier[match] = identifier[Match] ( literal[string] ,
identifier[argument_list] )
keyword[if] keyword[not] identifier[match] :
keyword[break]
identifier[typenames] . identifier[add] ( identifier[match] . identifier[group] ( literal[int] ))
identifier[argument_list] = identifier[match] . identifier[group] ( literal[int] )
keyword[return] identifier[typenames] | def GetTemplateArgs(clean_lines, linenum):
"""Find list of template arguments associated with this function declaration.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: Line number containing the start of the function declaration,
usually one line after the end of the template-argument-list.
Returns:
Set of type names, or empty set if this does not appear to have
any template parameters.
"""
# Find start of function
func_line = linenum
while func_line > 0:
line = clean_lines.elided[func_line]
if Match('^\\s*$', line):
return set() # depends on [control=['if'], data=[]]
if line.find('(') >= 0:
break # depends on [control=['if'], data=[]]
func_line -= 1 # depends on [control=['while'], data=['func_line']]
if func_line == 0:
return set() # depends on [control=['if'], data=[]]
# Collapse template-argument-list into a single string
argument_list = ''
match = Match('^(\\s*template\\s*)<', clean_lines.elided[func_line])
if match:
# template-argument-list on the same line as function name
start_col = len(match.group(1))
(_, end_line, end_col) = CloseExpression(clean_lines, func_line, start_col)
if end_col > -1 and end_line == func_line:
start_col += 1 # Skip the opening bracket
argument_list = clean_lines.elided[func_line][start_col:end_col] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif func_line > 1:
# template-argument-list one line before function name
match = Match('^(.*)>\\s*$', clean_lines.elided[func_line - 1])
if match:
end_col = len(match.group(1))
(_, start_line, start_col) = ReverseCloseExpression(clean_lines, func_line - 1, end_col)
if start_col > -1:
start_col += 1 # Skip the opening bracket
while start_line < func_line - 1:
argument_list += clean_lines.elided[start_line][start_col:]
start_col = 0
start_line += 1 # depends on [control=['while'], data=['start_line']]
argument_list += clean_lines.elided[func_line - 1][start_col:end_col] # depends on [control=['if'], data=['start_col']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['func_line']]
if not argument_list:
return set() # depends on [control=['if'], data=[]]
# Extract type names
typenames = set()
while True:
match = Match('^[,\\s]*(?:typename|class)(?:\\.\\.\\.)?\\s+(\\w+)(.*)$', argument_list)
if not match:
break # depends on [control=['if'], data=[]]
typenames.add(match.group(1))
argument_list = match.group(2) # depends on [control=['while'], data=[]]
return typenames |
def from_dict(self, dic):
    """Import data from a ``{cluster: {field: value}}`` dictionary."""
    self._reset_data()
    # Flatten the nested mapping into individual field assignments;
    # this is a bulk import, so nothing is pushed onto the undo stack.
    for cluster, fields in dic.items():
        for field in fields:
            self.set(field, [cluster], fields[field], add_to_stack=False)
    # Snapshot the freshly imported state as the new baseline.
    self._data_base = deepcopy(self._data)
constant[Import data from a {cluster: {field: value}} dictionary.]
call[name[self]._reset_data, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1391d80>, <ast.Name object at 0x7da1b1392fe0>]]] in starred[call[name[dic].items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1391270>, <ast.Name object at 0x7da1b1393670>]]] in starred[call[name[vals].items, parameter[]]] begin[:]
call[name[self].set, parameter[name[field], list[[<ast.Name object at 0x7da1b1391c30>]], name[value]]]
name[self]._data_base assign[=] call[name[deepcopy], parameter[name[self]._data]] | keyword[def] identifier[from_dict] ( identifier[self] , identifier[dic] ):
literal[string]
identifier[self] . identifier[_reset_data] ()
keyword[for] identifier[cluster] , identifier[vals] keyword[in] identifier[dic] . identifier[items] ():
keyword[for] identifier[field] , identifier[value] keyword[in] identifier[vals] . identifier[items] ():
identifier[self] . identifier[set] ( identifier[field] ,[ identifier[cluster] ], identifier[value] , identifier[add_to_stack] = keyword[False] )
identifier[self] . identifier[_data_base] = identifier[deepcopy] ( identifier[self] . identifier[_data] ) | def from_dict(self, dic):
"""Import data from a {cluster: {field: value}} dictionary."""
self._reset_data()
for (cluster, vals) in dic.items():
for (field, value) in vals.items():
self.set(field, [cluster], value, add_to_stack=False) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
self._data_base = deepcopy(self._data) |
def _clean_intenum(obj):
"""Remove all IntEnum classes from a map."""
if isinstance(obj, dict):
for key, value in obj.items():
if isinstance(value, IntEnum):
obj[key] = value.value
elif isinstance(value, (dict, list)):
obj[key] = _clean_intenum(value)
elif isinstance(obj, list):
for i, value in enumerate(obj):
if isinstance(value, IntEnum):
obj[i] = value.value
elif isinstance(value, (dict, list)):
obj[i] = _clean_intenum(value)
return obj | def function[_clean_intenum, parameter[obj]]:
constant[Remove all IntEnum classes from a map.]
if call[name[isinstance], parameter[name[obj], name[dict]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da204345810>, <ast.Name object at 0x7da204347580>]]] in starred[call[name[obj].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[value], name[IntEnum]]] begin[:]
call[name[obj]][name[key]] assign[=] name[value].value
return[name[obj]] | keyword[def] identifier[_clean_intenum] ( identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[dict] ):
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[obj] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[value] , identifier[IntEnum] ):
identifier[obj] [ identifier[key] ]= identifier[value] . identifier[value]
keyword[elif] identifier[isinstance] ( identifier[value] ,( identifier[dict] , identifier[list] )):
identifier[obj] [ identifier[key] ]= identifier[_clean_intenum] ( identifier[value] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[list] ):
keyword[for] identifier[i] , identifier[value] keyword[in] identifier[enumerate] ( identifier[obj] ):
keyword[if] identifier[isinstance] ( identifier[value] , identifier[IntEnum] ):
identifier[obj] [ identifier[i] ]= identifier[value] . identifier[value]
keyword[elif] identifier[isinstance] ( identifier[value] ,( identifier[dict] , identifier[list] )):
identifier[obj] [ identifier[i] ]= identifier[_clean_intenum] ( identifier[value] )
keyword[return] identifier[obj] | def _clean_intenum(obj):
"""Remove all IntEnum classes from a map."""
if isinstance(obj, dict):
for (key, value) in obj.items():
if isinstance(value, IntEnum):
obj[key] = value.value # depends on [control=['if'], data=[]]
elif isinstance(value, (dict, list)):
obj[key] = _clean_intenum(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(obj, list):
for (i, value) in enumerate(obj):
if isinstance(value, IntEnum):
obj[i] = value.value # depends on [control=['if'], data=[]]
elif isinstance(value, (dict, list)):
obj[i] = _clean_intenum(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return obj |
def get_release_notes(osa_repo_dir, osa_old_commit, osa_new_commit):
    """Get release notes between the two revisions.

    Runs ``reno report`` once for notes newer than the latest tag and once
    per tagged release between the two commits, concatenating the output
    newest-release-first, then demotes section headers to match osa-differ's
    formatting.

    Args:
        osa_repo_dir: Path to a local git checkout (opened with ``Repo``).
        osa_old_commit: Older commit SHA or ref.
        osa_new_commit: Newer commit SHA or ref.

    Returns:
        str: The concatenated, header-rewritten release-notes text.
    """
    repo = Repo(osa_repo_dir)
    # Get a list of tags, sorted
    tags = repo.git.tag().split('\n')
    tags = sorted(tags, key=LooseVersion)
    # Currently major tags are being printed after rc and
    # b tags. We need to fix the list so that major
    # tags are printed before rc and b releases
    tags = _fix_tags_list(tags)
    # Find the closest tag from a given SHA
    # The tag found here is the tag that was cut
    # either on or before the given SHA
    checkout(repo, osa_old_commit)
    old_tag = repo.git.describe()
    # If the SHA given is between two release tags, then
    # 'git describe' will return a tag in form of
    # <tag>-<commitNum>-<sha>. For example:
    # 14.0.2-3-g6931e26
    # Since reno does not support this format, we need to
    # strip away the commit number and sha bits.
    if '-' in old_tag:
        old_tag = old_tag[0:old_tag.index('-')]
    # Get the nearest tag associated with the new commit
    checkout(repo, osa_new_commit)
    new_tag = repo.git.describe()
    if '-' in new_tag:
        nearest_new_tag = new_tag[0:new_tag.index('-')]
    else:
        nearest_new_tag = new_tag
    # Truncate the tags list to only include versions
    # between old_sha and new_sha. The latest release
    # is not included in this list. That version will be
    # printed separately in the following step.
    tags = tags[tags.index(old_tag):tags.index(nearest_new_tag)]
    release_notes = ""
    # Checkout the new commit, then run reno to get the latest
    # releasenotes that have been created or updated between
    # the latest release and this new commit.
    repo.git.checkout(osa_new_commit, '-f')
    reno_report_command = ['reno',
                           'report',
                           '--earliest-version',
                           nearest_new_tag]
    reno_report_p = subprocess.Popen(reno_report_command,
                                     cwd=osa_repo_dir,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
    reno_output = reno_report_p.communicate()[0].decode('UTF-8')
    release_notes += reno_output
    # We want to start with the latest packaged release first, so
    # the tags list is reversed
    for version in reversed(tags):
        # NOTE(review): an earlier comment claimed rc/b tags with a major
        # release were skipped here, but no such skip exists in this loop —
        # every tag in the truncated list is reported.
        repo.git.checkout(version, '-f')
        # We are outputing one version at a time here
        reno_report_command = ['reno',
                               'report',
                               '--branch',
                               version,
                               '--earliest-version',
                               version]
        reno_report_p = subprocess.Popen(reno_report_command,
                                         cwd=osa_repo_dir,
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
        reno_output = reno_report_p.communicate()[0].decode('UTF-8')
        # We need to ensure the output includes the version we are concerned
        # about.
        # This is due to https://bugs.launchpad.net/reno/+bug/1670173
        if version in reno_output:
            release_notes += reno_output
    # Clean up "Release Notes" title. We don't need this title for
    # each tagged release.
    release_notes = release_notes.replace(
        "=============\nRelease Notes\n=============",
        ""
    )
    # Replace headers that contain '=' with '~' to comply with osa-differ's
    # formatting
    release_notes = re.sub('===+', _equal_to_tilde, release_notes)
    # Replace headers that contain '-' with '#' to comply with osa-differ's
    # formatting
    release_notes = re.sub('---+', _dash_to_num, release_notes)
    return release_notes
constant[Get release notes between the two revisions.]
variable[repo] assign[=] call[name[Repo], parameter[name[osa_repo_dir]]]
variable[tags] assign[=] call[call[name[repo].git.tag, parameter[]].split, parameter[constant[
]]]
variable[tags] assign[=] call[name[sorted], parameter[name[tags]]]
variable[tags] assign[=] call[name[_fix_tags_list], parameter[name[tags]]]
call[name[checkout], parameter[name[repo], name[osa_old_commit]]]
variable[old_tag] assign[=] call[name[repo].git.describe, parameter[]]
if compare[constant[-] in name[old_tag]] begin[:]
variable[old_tag] assign[=] call[name[old_tag]][<ast.Slice object at 0x7da2041d9a80>]
call[name[checkout], parameter[name[repo], name[osa_new_commit]]]
variable[new_tag] assign[=] call[name[repo].git.describe, parameter[]]
if compare[constant[-] in name[new_tag]] begin[:]
variable[nearest_new_tag] assign[=] call[name[new_tag]][<ast.Slice object at 0x7da2041d8250>]
variable[tags] assign[=] call[name[tags]][<ast.Slice object at 0x7da2041da9b0>]
variable[release_notes] assign[=] constant[]
call[name[repo].git.checkout, parameter[name[osa_new_commit], constant[-f]]]
variable[reno_report_command] assign[=] list[[<ast.Constant object at 0x7da2041d91b0>, <ast.Constant object at 0x7da2041d9090>, <ast.Constant object at 0x7da2041d9330>, <ast.Name object at 0x7da2041dbd90>]]
variable[reno_report_p] assign[=] call[name[subprocess].Popen, parameter[name[reno_report_command]]]
variable[reno_output] assign[=] call[call[call[name[reno_report_p].communicate, parameter[]]][constant[0]].decode, parameter[constant[UTF-8]]]
<ast.AugAssign object at 0x7da2041dbf10>
for taget[name[version]] in starred[call[name[reversed], parameter[name[tags]]]] begin[:]
call[name[repo].git.checkout, parameter[name[version], constant[-f]]]
variable[reno_report_command] assign[=] list[[<ast.Constant object at 0x7da2041dabf0>, <ast.Constant object at 0x7da2041da800>, <ast.Constant object at 0x7da2041dbc70>, <ast.Name object at 0x7da2041dbaf0>, <ast.Constant object at 0x7da2041db3d0>, <ast.Name object at 0x7da2041daa40>]]
variable[reno_report_p] assign[=] call[name[subprocess].Popen, parameter[name[reno_report_command]]]
variable[reno_output] assign[=] call[call[call[name[reno_report_p].communicate, parameter[]]][constant[0]].decode, parameter[constant[UTF-8]]]
if compare[name[version] in name[reno_output]] begin[:]
<ast.AugAssign object at 0x7da2041dbca0>
variable[release_notes] assign[=] call[name[release_notes].replace, parameter[constant[=============
Release Notes
=============], constant[]]]
variable[release_notes] assign[=] call[name[re].sub, parameter[constant[===+], name[_equal_to_tilde], name[release_notes]]]
variable[release_notes] assign[=] call[name[re].sub, parameter[constant[---+], name[_dash_to_num], name[release_notes]]]
return[name[release_notes]] | keyword[def] identifier[get_release_notes] ( identifier[osa_repo_dir] , identifier[osa_old_commit] , identifier[osa_new_commit] ):
literal[string]
identifier[repo] = identifier[Repo] ( identifier[osa_repo_dir] )
identifier[tags] = identifier[repo] . identifier[git] . identifier[tag] (). identifier[split] ( literal[string] )
identifier[tags] = identifier[sorted] ( identifier[tags] , identifier[key] = identifier[LooseVersion] )
identifier[tags] = identifier[_fix_tags_list] ( identifier[tags] )
identifier[checkout] ( identifier[repo] , identifier[osa_old_commit] )
identifier[old_tag] = identifier[repo] . identifier[git] . identifier[describe] ()
keyword[if] literal[string] keyword[in] identifier[old_tag] :
identifier[old_tag] = identifier[old_tag] [ literal[int] : identifier[old_tag] . identifier[index] ( literal[string] )]
identifier[checkout] ( identifier[repo] , identifier[osa_new_commit] )
identifier[new_tag] = identifier[repo] . identifier[git] . identifier[describe] ()
keyword[if] literal[string] keyword[in] identifier[new_tag] :
identifier[nearest_new_tag] = identifier[new_tag] [ literal[int] : identifier[new_tag] . identifier[index] ( literal[string] )]
keyword[else] :
identifier[nearest_new_tag] = identifier[new_tag]
identifier[tags] = identifier[tags] [ identifier[tags] . identifier[index] ( identifier[old_tag] ): identifier[tags] . identifier[index] ( identifier[nearest_new_tag] )]
identifier[release_notes] = literal[string]
identifier[repo] . identifier[git] . identifier[checkout] ( identifier[osa_new_commit] , literal[string] )
identifier[reno_report_command] =[ literal[string] ,
literal[string] ,
literal[string] ,
identifier[nearest_new_tag] ]
identifier[reno_report_p] = identifier[subprocess] . identifier[Popen] ( identifier[reno_report_command] ,
identifier[cwd] = identifier[osa_repo_dir] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] )
identifier[reno_output] = identifier[reno_report_p] . identifier[communicate] ()[ literal[int] ]. identifier[decode] ( literal[string] )
identifier[release_notes] += identifier[reno_output]
keyword[for] identifier[version] keyword[in] identifier[reversed] ( identifier[tags] ):
identifier[repo] . identifier[git] . identifier[checkout] ( identifier[version] , literal[string] )
identifier[reno_report_command] =[ literal[string] ,
literal[string] ,
literal[string] ,
identifier[version] ,
literal[string] ,
identifier[version] ]
identifier[reno_report_p] = identifier[subprocess] . identifier[Popen] ( identifier[reno_report_command] ,
identifier[cwd] = identifier[osa_repo_dir] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] )
identifier[reno_output] = identifier[reno_report_p] . identifier[communicate] ()[ literal[int] ]. identifier[decode] ( literal[string] )
keyword[if] identifier[version] keyword[in] identifier[reno_output] :
identifier[release_notes] += identifier[reno_output]
identifier[release_notes] = identifier[release_notes] . identifier[replace] (
literal[string] ,
literal[string]
)
identifier[release_notes] = identifier[re] . identifier[sub] ( literal[string] , identifier[_equal_to_tilde] , identifier[release_notes] )
identifier[release_notes] = identifier[re] . identifier[sub] ( literal[string] , identifier[_dash_to_num] , identifier[release_notes] )
keyword[return] identifier[release_notes] | def get_release_notes(osa_repo_dir, osa_old_commit, osa_new_commit):
"""Get release notes between the two revisions."""
repo = Repo(osa_repo_dir)
# Get a list of tags, sorted
tags = repo.git.tag().split('\n')
tags = sorted(tags, key=LooseVersion)
# Currently major tags are being printed after rc and
# b tags. We need to fix the list so that major
# tags are printed before rc and b releases
tags = _fix_tags_list(tags)
# Find the closest tag from a given SHA
# The tag found here is the tag that was cut
# either on or before the given SHA
checkout(repo, osa_old_commit)
old_tag = repo.git.describe()
# If the SHA given is between two release tags, then
# 'git describe' will return a tag in form of
# <tag>-<commitNum>-<sha>. For example:
# 14.0.2-3-g6931e26
# Since reno does not support this format, we need to
# strip away the commit number and sha bits.
if '-' in old_tag:
old_tag = old_tag[0:old_tag.index('-')] # depends on [control=['if'], data=['old_tag']]
# Get the nearest tag associated with the new commit
checkout(repo, osa_new_commit)
new_tag = repo.git.describe()
if '-' in new_tag:
nearest_new_tag = new_tag[0:new_tag.index('-')] # depends on [control=['if'], data=['new_tag']]
else:
nearest_new_tag = new_tag
# Truncate the tags list to only include versions
# between old_sha and new_sha. The latest release
# is not included in this list. That version will be
# printed separately in the following step.
tags = tags[tags.index(old_tag):tags.index(nearest_new_tag)]
release_notes = ''
# Checkout the new commit, then run reno to get the latest
# releasenotes that have been created or updated between
# the latest release and this new commit.
repo.git.checkout(osa_new_commit, '-f')
reno_report_command = ['reno', 'report', '--earliest-version', nearest_new_tag]
reno_report_p = subprocess.Popen(reno_report_command, cwd=osa_repo_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
reno_output = reno_report_p.communicate()[0].decode('UTF-8')
release_notes += reno_output
# We want to start with the latest packaged release first, so
# the tags list is reversed
for version in reversed(tags):
# If version is an rc or b tag, and it has a major
# release tag, then skip it. There is no need to print
# release notes for an rc or b release unless we are
# comparing shas between two rc or b releases.
repo.git.checkout(version, '-f')
# We are outputing one version at a time here
reno_report_command = ['reno', 'report', '--branch', version, '--earliest-version', version]
reno_report_p = subprocess.Popen(reno_report_command, cwd=osa_repo_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
reno_output = reno_report_p.communicate()[0].decode('UTF-8')
# We need to ensure the output includes the version we are concerned
# about.
# This is due to https://bugs.launchpad.net/reno/+bug/1670173
if version in reno_output:
release_notes += reno_output # depends on [control=['if'], data=['reno_output']] # depends on [control=['for'], data=['version']]
# Clean up "Release Notes" title. We don't need this title for
# each tagged release.
release_notes = release_notes.replace('=============\nRelease Notes\n=============', '')
# Replace headers that contain '=' with '~' to comply with osa-differ's
# formatting
release_notes = re.sub('===+', _equal_to_tilde, release_notes)
# Replace headers that contain '-' with '#' to comply with osa-differ's
# formatting
release_notes = re.sub('---+', _dash_to_num, release_notes)
return release_notes |
def get_edge(self, src_or_list, dst=None):
    """Retrieve an edge from the graph.

    Given an edge's source and destination, the corresponding
    Edge instance(s) will be returned.  If one or more edges exist
    with that source and destination, a list of Edge instances is
    returned; an empty list is returned otherwise.
    """
    # Endpoints may arrive as one (src, dst) sequence or as two
    # separate positional arguments.
    if dst is None and isinstance(src_or_list, (list, tuple)):
        endpoints = tuple(src_or_list)
    else:
        endpoints = (src_or_list, dst)
    reversed_endpoints = (endpoints[1], endpoints[0])

    edges = self.obj_dict['edges']
    # Undirected graphs ('graph') match an edge in either direction.
    found = endpoints in edges or (
        self.get_top_graph_type() == 'graph' and
        reversed_endpoints in edges)

    matches = []
    if found:
        obj_dicts = edges.get(
            endpoints,
            edges.get(reversed_endpoints, None))
        for obj_dict in obj_dicts:
            matches.append(
                Edge(endpoints[0], endpoints[1], obj_dict=obj_dict))
    return matches
constant[Retrieved an edge from the graph.
Given an edge's source and destination the corresponding
Edge instance(s) will be returned.
If one or more edges exist with that source and destination
a list of Edge instances is returned.
An empty list is returned otherwise.
]
if <ast.BoolOp object at 0x7da1b1e72200> begin[:]
variable[edge_points] assign[=] call[name[tuple], parameter[name[src_or_list]]]
variable[edge_points_reverse] assign[=] tuple[[<ast.Subscript object at 0x7da1b1e71db0>, <ast.Subscript object at 0x7da1b1e735b0>]]
variable[match] assign[=] call[name[list], parameter[]]
if <ast.BoolOp object at 0x7da1b1e715d0> begin[:]
variable[edges_obj_dict] assign[=] call[call[name[self].obj_dict][constant[edges]].get, parameter[name[edge_points], call[call[name[self].obj_dict][constant[edges]].get, parameter[name[edge_points_reverse], constant[None]]]]]
for taget[name[edge_obj_dict]] in starred[name[edges_obj_dict]] begin[:]
call[name[match].append, parameter[call[name[Edge], parameter[call[name[edge_points]][constant[0]], call[name[edge_points]][constant[1]]]]]]
return[name[match]] | keyword[def] identifier[get_edge] ( identifier[self] , identifier[src_or_list] , identifier[dst] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[src_or_list] ,( identifier[list] , identifier[tuple] )) keyword[and] identifier[dst] keyword[is] keyword[None] :
identifier[edge_points] = identifier[tuple] ( identifier[src_or_list] )
identifier[edge_points_reverse] =( identifier[edge_points] [ literal[int] ], identifier[edge_points] [ literal[int] ])
keyword[else] :
identifier[edge_points] =( identifier[src_or_list] , identifier[dst] )
identifier[edge_points_reverse] =( identifier[dst] , identifier[src_or_list] )
identifier[match] = identifier[list] ()
keyword[if] identifier[edge_points] keyword[in] identifier[self] . identifier[obj_dict] [ literal[string] ] keyword[or] (
identifier[self] . identifier[get_top_graph_type] ()== literal[string] keyword[and]
identifier[edge_points_reverse] keyword[in] identifier[self] . identifier[obj_dict] [ literal[string] ]):
identifier[edges_obj_dict] = identifier[self] . identifier[obj_dict] [ literal[string] ]. identifier[get] (
identifier[edge_points] ,
identifier[self] . identifier[obj_dict] [ literal[string] ]. identifier[get] ( identifier[edge_points_reverse] , keyword[None] ))
keyword[for] identifier[edge_obj_dict] keyword[in] identifier[edges_obj_dict] :
identifier[match] . identifier[append] (
identifier[Edge] ( identifier[edge_points] [ literal[int] ],
identifier[edge_points] [ literal[int] ],
identifier[obj_dict] = identifier[edge_obj_dict] ))
keyword[return] identifier[match] | def get_edge(self, src_or_list, dst=None):
"""Retrieved an edge from the graph.
Given an edge's source and destination the corresponding
Edge instance(s) will be returned.
If one or more edges exist with that source and destination
a list of Edge instances is returned.
An empty list is returned otherwise.
"""
if isinstance(src_or_list, (list, tuple)) and dst is None:
edge_points = tuple(src_or_list)
edge_points_reverse = (edge_points[1], edge_points[0]) # depends on [control=['if'], data=[]]
else:
edge_points = (src_or_list, dst)
edge_points_reverse = (dst, src_or_list)
match = list()
if edge_points in self.obj_dict['edges'] or (self.get_top_graph_type() == 'graph' and edge_points_reverse in self.obj_dict['edges']):
edges_obj_dict = self.obj_dict['edges'].get(edge_points, self.obj_dict['edges'].get(edge_points_reverse, None))
for edge_obj_dict in edges_obj_dict:
match.append(Edge(edge_points[0], edge_points[1], obj_dict=edge_obj_dict)) # depends on [control=['for'], data=['edge_obj_dict']] # depends on [control=['if'], data=[]]
return match |
def pack_ip(ipstr):
    """Converts an ip address given in dotted notation to a four byte
    string in network byte order.

    >>> len(pack_ip("127.0.0.1"))
    4
    >>> pack_ip("foo")
    Traceback (most recent call last):
    ...
    ValueError: given ip address has an invalid number of dots

    @type ipstr: str
    @rtype: bytes
    @raises ValueError: for badly formatted ip addresses
    """
    if not isinstance(ipstr, basestring):
        raise ValueError("given ip address is not a string")

    octets = ipstr.split('.')
    if len(octets) != 4:
        raise ValueError("given ip address has an invalid number of dots")

    # int() raises ValueError for non-numeric parts, which satisfies the
    # documented contract for malformed addresses.
    return int_seq_to_bytes([int(octet) for octet in octets])
constant[Converts an ip address given in dotted notation to a four byte
string in network byte order.
>>> len(pack_ip("127.0.0.1"))
4
>>> pack_ip("foo")
Traceback (most recent call last):
...
ValueError: given ip address has an invalid number of dots
@type ipstr: str
@rtype: bytes
@raises ValueError: for badly formatted ip addresses
]
if <ast.UnaryOp object at 0x7da18f8119c0> begin[:]
<ast.Raise object at 0x7da18f812980>
variable[parts] assign[=] call[name[ipstr].split, parameter[constant[.]]]
if compare[call[name[len], parameter[name[parts]]] not_equal[!=] constant[4]] begin[:]
<ast.Raise object at 0x7da18f813c40>
variable[parts] assign[=] <ast.ListComp object at 0x7da18f812200>
return[call[name[int_seq_to_bytes], parameter[name[parts]]]] | keyword[def] identifier[pack_ip] ( identifier[ipstr] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[ipstr] , identifier[basestring] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[parts] = identifier[ipstr] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[parts] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[parts] =[ identifier[int] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[parts] ]
keyword[return] identifier[int_seq_to_bytes] ( identifier[parts] ) | def pack_ip(ipstr):
"""Converts an ip address given in dotted notation to a four byte
string in network byte order.
>>> len(pack_ip("127.0.0.1"))
4
>>> pack_ip("foo")
Traceback (most recent call last):
...
ValueError: given ip address has an invalid number of dots
@type ipstr: str
@rtype: bytes
@raises ValueError: for badly formatted ip addresses
"""
if not isinstance(ipstr, basestring):
raise ValueError('given ip address is not a string') # depends on [control=['if'], data=[]]
parts = ipstr.split('.')
if len(parts) != 4:
raise ValueError('given ip address has an invalid number of dots') # depends on [control=['if'], data=[]]
parts = [int(x) for x in parts] # raises ValueError
return int_seq_to_bytes(parts) |
def inputRect(self):
    """
    Returns the bounding rectangle for the input node associated with this
    connection. If only a point is provided, then a 0 width rect will be
    used.

    :return <QRectF>
    """
    try:
        # Prefer the attached input node's full scene rectangle.
        return self._inputNode.sceneRect()
    except AttributeError:
        # No input node attached (presumably self._inputNode is None) --
        # fall back to a zero-size rectangle at the input point.
        point = self.inputPoint()
        return QRectF(point.x(), point.y(), 0, 0)
constant[
Returns the bounding rectangle for the input node associated with this
connection. If only a point is provided, then a 0 width rect will be
used.
:return <QRectF>
]
<ast.Try object at 0x7da1b24fdc90> | keyword[def] identifier[inputRect] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[_inputNode] . identifier[sceneRect] ()
keyword[except] identifier[AttributeError] :
identifier[point] = identifier[self] . identifier[inputPoint] ()
keyword[return] identifier[QRectF] ( identifier[point] . identifier[x] (), identifier[point] . identifier[y] (), literal[int] , literal[int] ) | def inputRect(self):
"""
Returns the bounding rectangle for the input node associated with this
connection. If only a point is provided, then a 0 width rect will be
used.
:return <QRectF>
"""
try:
return self._inputNode.sceneRect() # depends on [control=['try'], data=[]]
except AttributeError:
point = self.inputPoint()
return QRectF(point.x(), point.y(), 0, 0) # depends on [control=['except'], data=[]] |
def sayHelloAsync(self, name="Not given", message="nothing"):
    """
    Implementation of IHello.sayHelloAsync.

    This method will be executed via some thread, and the remote caller
    will not block.  It may return either a plain string (the declared
    return type of IHello.sayHelloAsync is CompletableFuture<String>) or
    a Future resolving to a Python string; here the string is returned
    directly.
    """
    # Log the incoming call before producing the reply.
    print("Python.sayHelloAsync called by: %s with message: '%s'"
          % (name, message))
    return ("PythonAsync says: Howdy %s "
            "that's a nice runtime you got there" % name)
constant[
Implementation of IHello.sayHelloAsync.
This method will be executed via some thread, and the remote caller
will not block.
This method should return either a String result (since the return type
of IHello.sayHelloAsync is CompletableFuture<String>, OR a Future that
returns a python string. In this case, it returns the string directly.
]
call[name[print], parameter[call[constant[Python.sayHelloAsync called by: {0} with message: '{1}'].format, parameter[name[name], name[message]]]]]
return[call[constant[PythonAsync says: Howdy {0} that's a nice runtime you got there].format, parameter[name[name]]]] | keyword[def] identifier[sayHelloAsync] ( identifier[self] , identifier[name] = literal[string] , identifier[message] = literal[string] ):
literal[string]
identifier[print] (
literal[string]
literal[string] . identifier[format] ( identifier[name] , identifier[message] )
)
keyword[return] (
literal[string]
literal[string] . identifier[format] ( identifier[name] )
) | def sayHelloAsync(self, name='Not given', message='nothing'):
"""
Implementation of IHello.sayHelloAsync.
This method will be executed via some thread, and the remote caller
will not block.
This method should return either a String result (since the return type
of IHello.sayHelloAsync is CompletableFuture<String>, OR a Future that
returns a python string. In this case, it returns the string directly.
"""
print("Python.sayHelloAsync called by: {0} with message: '{1}'".format(name, message))
return "PythonAsync says: Howdy {0} that's a nice runtime you got there".format(name) |
def CreateSnapshot(self, delete_existing=True, expiration_days=7):
    """Take a Hypervisor level snapshot retained for between 1 and 10 days (7 is default).

    Currently only one snapshot may exist at a time: an existing snapshot
    is deleted first when ``delete_existing`` is True, otherwise a
    CLCException is raised.

    >>> clc.v2.Server("WA1BTDIAPI219").CreateSnapshot(2)
    <clc.APIv2.queue.Requests object at 0x10d106cd0>
    >>> _.WaitUntilComplete()
    0
    """
    if len(self.data['details']['snapshots']):
        if not delete_existing:
            raise clc.CLCException("Snapshot already exists cannot take another")
        self.DeleteSnapshot()

    payload = {'serverIds': self.id, 'snapshotExpirationDays': expiration_days}
    api_call = clc.v2.API.Call(
        'POST',
        'operations/%s/servers/createSnapshot' % (self.alias),
        payload,
        session=self.session)
    return clc.v2.Requests(api_call, alias=self.alias, session=self.session)
constant[Take a Hypervisor level snapshot retained for between 1 and 10 days (7 is default).
Currently only one snapshop may exist at a time, thus will delete snapshots if one already
exists before taking this snapshot.
>>> clc.v2.Server("WA1BTDIAPI219").CreateSnapshot(2)
<clc.APIv2.queue.Requests object at 0x10d106cd0>
>>> _.WaitUntilComplete()
0
]
if call[name[len], parameter[call[call[name[self].data][constant[details]]][constant[snapshots]]]] begin[:]
if name[delete_existing] begin[:]
call[name[self].DeleteSnapshot, parameter[]]
return[call[name[clc].v2.Requests, parameter[call[name[clc].v2.API.Call, parameter[constant[POST], binary_operation[constant[operations/%s/servers/createSnapshot] <ast.Mod object at 0x7da2590d6920> name[self].alias], dictionary[[<ast.Constant object at 0x7da1b23c3a90>, <ast.Constant object at 0x7da1b23c22f0>], [<ast.Attribute object at 0x7da1b23c1540>, <ast.Name object at 0x7da1b23c3c10>]]]]]]] | keyword[def] identifier[CreateSnapshot] ( identifier[self] , identifier[delete_existing] = keyword[True] , identifier[expiration_days] = literal[int] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[data] [ literal[string] ][ literal[string] ]):
keyword[if] identifier[delete_existing] : identifier[self] . identifier[DeleteSnapshot] ()
keyword[else] : keyword[raise] ( identifier[clc] . identifier[CLCException] ( literal[string] ))
keyword[return] ( identifier[clc] . identifier[v2] . identifier[Requests] ( identifier[clc] . identifier[v2] . identifier[API] . identifier[Call] ( literal[string] , literal[string] %( identifier[self] . identifier[alias] ),
{ literal[string] : identifier[self] . identifier[id] , literal[string] : identifier[expiration_days] },
identifier[session] = identifier[self] . identifier[session] ),
identifier[alias] = identifier[self] . identifier[alias] ,
identifier[session] = identifier[self] . identifier[session] )) | def CreateSnapshot(self, delete_existing=True, expiration_days=7):
"""Take a Hypervisor level snapshot retained for between 1 and 10 days (7 is default).
Currently only one snapshop may exist at a time, thus will delete snapshots if one already
exists before taking this snapshot.
>>> clc.v2.Server("WA1BTDIAPI219").CreateSnapshot(2)
<clc.APIv2.queue.Requests object at 0x10d106cd0>
>>> _.WaitUntilComplete()
0
"""
if len(self.data['details']['snapshots']):
if delete_existing:
self.DeleteSnapshot() # depends on [control=['if'], data=[]]
else:
raise clc.CLCException('Snapshot already exists cannot take another') # depends on [control=['if'], data=[]]
return clc.v2.Requests(clc.v2.API.Call('POST', 'operations/%s/servers/createSnapshot' % self.alias, {'serverIds': self.id, 'snapshotExpirationDays': expiration_days}, session=self.session), alias=self.alias, session=self.session) |
def get_cms_model_order(model_name):
    """
    Return a numeric ordering for a model name.
    """
    # First configured substring match wins; unknown models sort last.
    ordering = appsettings.FLUENT_DASHBOARD_CMS_MODEL_ORDER
    for substring, position in iteritems(ordering):
        if substring in model_name:
            return position
    return 999
constant[
Return a numeric ordering for a model name.
]
for taget[tuple[[<ast.Name object at 0x7da2041db6d0>, <ast.Name object at 0x7da2041d8f10>]]] in starred[call[name[iteritems], parameter[name[appsettings].FLUENT_DASHBOARD_CMS_MODEL_ORDER]]] begin[:]
if compare[name[name] in name[model_name]] begin[:]
return[name[order]]
return[constant[999]] | keyword[def] identifier[get_cms_model_order] ( identifier[model_name] ):
literal[string]
keyword[for] ( identifier[name] , identifier[order] ) keyword[in] identifier[iteritems] ( identifier[appsettings] . identifier[FLUENT_DASHBOARD_CMS_MODEL_ORDER] ):
keyword[if] identifier[name] keyword[in] identifier[model_name] :
keyword[return] identifier[order]
keyword[return] literal[int] | def get_cms_model_order(model_name):
"""
Return a numeric ordering for a model name.
"""
for (name, order) in iteritems(appsettings.FLUENT_DASHBOARD_CMS_MODEL_ORDER):
if name in model_name:
return order # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return 999 |
def disable_buttons(self):
    """
    Function disables buttons
    """
    # Grey out the main action and hide navigation while work is running.
    self.main_btn.set_sensitive(False)
    self.back_btn.hide()
    self.info_label.set_label('<span color="#FFA500">In progress...</span>')
    # Keep the user from closing the window mid-operation.
    self.disable_close_window()
    link = self.link
    if link is not None:
        link.hide()
constant[
Function disables buttons
]
call[name[self].main_btn.set_sensitive, parameter[constant[False]]]
call[name[self].back_btn.hide, parameter[]]
call[name[self].info_label.set_label, parameter[constant[<span color="#FFA500">In progress...</span>]]]
call[name[self].disable_close_window, parameter[]]
if compare[name[self].link is_not constant[None]] begin[:]
call[name[self].link.hide, parameter[]] | keyword[def] identifier[disable_buttons] ( identifier[self] ):
literal[string]
identifier[self] . identifier[main_btn] . identifier[set_sensitive] ( keyword[False] )
identifier[self] . identifier[back_btn] . identifier[hide] ()
identifier[self] . identifier[info_label] . identifier[set_label] ( literal[string] )
identifier[self] . identifier[disable_close_window] ()
keyword[if] identifier[self] . identifier[link] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[link] . identifier[hide] () | def disable_buttons(self):
"""
Function disables buttons
"""
self.main_btn.set_sensitive(False)
self.back_btn.hide()
self.info_label.set_label('<span color="#FFA500">In progress...</span>')
self.disable_close_window()
if self.link is not None:
self.link.hide() # depends on [control=['if'], data=[]] |
def sendline(self, s=''):
    '''Wraps send(), sending string ``s`` to child process, with
    ``os.linesep`` automatically appended. Returns number of bytes
    written. Only a limited number of bytes may be sent for each
    line in the default terminal mode, see docstring of :meth:`send`.
    '''
    payload = self._coerce_send_string(s)
    return self.send(payload + self.linesep)
constant[Wraps send(), sending string ``s`` to child process, with
``os.linesep`` automatically appended. Returns number of bytes
written. Only a limited number of bytes may be sent for each
line in the default terminal mode, see docstring of :meth:`send`.
]
variable[s] assign[=] call[name[self]._coerce_send_string, parameter[name[s]]]
return[call[name[self].send, parameter[binary_operation[name[s] + name[self].linesep]]]] | keyword[def] identifier[sendline] ( identifier[self] , identifier[s] = literal[string] ):
literal[string]
identifier[s] = identifier[self] . identifier[_coerce_send_string] ( identifier[s] )
keyword[return] identifier[self] . identifier[send] ( identifier[s] + identifier[self] . identifier[linesep] ) | def sendline(self, s=''):
"""Wraps send(), sending string ``s`` to child process, with
``os.linesep`` automatically appended. Returns number of bytes
written. Only a limited number of bytes may be sent for each
line in the default terminal mode, see docstring of :meth:`send`.
"""
s = self._coerce_send_string(s)
return self.send(s + self.linesep) |
def md5_for_file(f, block_size=2 ** 20):
    """Generate an MD5 hash for a possibly large file by breaking it into chunks.

    Accepts either a binary file-like object (rewound to position 0 and
    read in ``block_size`` chunks) or a filesystem path, which is opened
    in binary mode and processed recursively.

    :param f: binary file-like object with seek()/read(), or a path.
    :param block_size: number of bytes read per chunk (default 1 MiB).
    :return: hex digest string of the MD5 hash.
    """
    import hashlib

    md5 = hashlib.md5()
    try:
        # Duck-type probe: only seek() sits inside the try, so an
        # AttributeError raised later while reading/hashing is not
        # silently misinterpreted as "f is a path".
        f.seek(0)
    except AttributeError:
        # Not a file-like object -- treat it as a filesystem path.
        with open(f, 'rb') as fh:
            return md5_for_file(fh, block_size)

    while True:
        data = f.read(block_size)
        if not data:
            break
        md5.update(data)
    return md5.hexdigest()
constant[Generate an MD5 has for a possibly large file by breaking it into chunks]
import module[hashlib]
variable[md5] assign[=] call[name[hashlib].md5, parameter[]]
<ast.Try object at 0x7da207f9bca0> | keyword[def] identifier[md5_for_file] ( identifier[f] , identifier[block_size] = literal[int] ** literal[int] ):
literal[string]
keyword[import] identifier[hashlib]
identifier[md5] = identifier[hashlib] . identifier[md5] ()
keyword[try] :
identifier[f] . identifier[seek] ( literal[int] )
keyword[while] keyword[True] :
identifier[data] = identifier[f] . identifier[read] ( identifier[block_size] )
keyword[if] keyword[not] identifier[data] :
keyword[break]
identifier[md5] . identifier[update] ( identifier[data] )
keyword[return] identifier[md5] . identifier[hexdigest] ()
keyword[except] identifier[AttributeError] keyword[as] identifier[e] :
identifier[file_name] = identifier[f]
keyword[with] identifier[open] ( identifier[file_name] , literal[string] ) keyword[as] identifier[f] :
keyword[return] identifier[md5_for_file] ( identifier[f] , identifier[block_size] ) | def md5_for_file(f, block_size=2 ** 20):
"""Generate an MD5 has for a possibly large file by breaking it into chunks"""
import hashlib
md5 = hashlib.md5()
try:
# Guess that f is a FLO.
f.seek(0)
while True:
data = f.read(block_size)
if not data:
break # depends on [control=['if'], data=[]]
md5.update(data) # depends on [control=['while'], data=[]]
return md5.hexdigest() # depends on [control=['try'], data=[]]
except AttributeError as e:
# Nope, not a FLO. Maybe string?
file_name = f
with open(file_name, 'rb') as f:
return md5_for_file(f, block_size) # depends on [control=['with'], data=['f']] # depends on [control=['except'], data=[]] |
def qos_queue_scheduler_strict_priority_dwrr_traffic_class1(self, **kwargs):
    """Auto Generated Code

    Builds the config XML for the dwrr-traffic-class1 leaf under
    qos/queue/scheduler/strict-priority and hands it to the callback.
    """
    config = ET.Element("config")
    qos = ET.SubElement(config, "qos", xmlns="urn:brocade.com:mgmt:brocade-qos")
    # Build the nested container path down to strict-priority.
    strict_priority = ET.SubElement(
        ET.SubElement(ET.SubElement(qos, "queue"), "scheduler"),
        "strict-priority")
    leaf = ET.SubElement(strict_priority, "dwrr-traffic-class1")
    leaf.text = kwargs.pop('dwrr_traffic_class1')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[qos] assign[=] call[name[ET].SubElement, parameter[name[config], constant[qos]]]
variable[queue] assign[=] call[name[ET].SubElement, parameter[name[qos], constant[queue]]]
variable[scheduler] assign[=] call[name[ET].SubElement, parameter[name[queue], constant[scheduler]]]
variable[strict_priority] assign[=] call[name[ET].SubElement, parameter[name[scheduler], constant[strict-priority]]]
variable[dwrr_traffic_class1] assign[=] call[name[ET].SubElement, parameter[name[strict_priority], constant[dwrr-traffic-class1]]]
name[dwrr_traffic_class1].text assign[=] call[name[kwargs].pop, parameter[constant[dwrr_traffic_class1]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[qos_queue_scheduler_strict_priority_dwrr_traffic_class1] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[qos] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[queue] = identifier[ET] . identifier[SubElement] ( identifier[qos] , literal[string] )
identifier[scheduler] = identifier[ET] . identifier[SubElement] ( identifier[queue] , literal[string] )
identifier[strict_priority] = identifier[ET] . identifier[SubElement] ( identifier[scheduler] , literal[string] )
identifier[dwrr_traffic_class1] = identifier[ET] . identifier[SubElement] ( identifier[strict_priority] , literal[string] )
identifier[dwrr_traffic_class1] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def qos_queue_scheduler_strict_priority_dwrr_traffic_class1(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
qos = ET.SubElement(config, 'qos', xmlns='urn:brocade.com:mgmt:brocade-qos')
queue = ET.SubElement(qos, 'queue')
scheduler = ET.SubElement(queue, 'scheduler')
strict_priority = ET.SubElement(scheduler, 'strict-priority')
dwrr_traffic_class1 = ET.SubElement(strict_priority, 'dwrr-traffic-class1')
dwrr_traffic_class1.text = kwargs.pop('dwrr_traffic_class1')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def _get_item(self, question_id):
    """we need a middle-man method to convert the unique "assessment-session"
    authority question_ids into "real" itemIds

    BUT this also has to return the "magic" item, so we can't rely
    on

    question = self.get_question(question_id)
    ils = self._get_item_lookup_session()
    return ils.get_item(Id(question._my_map['itemId']))

    """
    # Raises NotFound when the question is unknown.
    question_map = self._get_question_map(question_id)
    real_item_id = Id(question_map['questionId'])
    lookup_session = self._get_item_lookup_session()
    return lookup_session.get_item(real_item_id)
constant[we need a middle-man method to convert the unique "assessment-session"
authority question_ids into "real" itemIds
BUT this also has to return the "magic" item, so we can't rely
on
question = self.get_question(question_id)
ils = self._get_item_lookup_session()
return ils.get_item(Id(question._my_map['itemId']))
]
variable[question_map] assign[=] call[name[self]._get_question_map, parameter[name[question_id]]]
variable[real_question_id] assign[=] call[name[Id], parameter[call[name[question_map]][constant[questionId]]]]
return[call[call[name[self]._get_item_lookup_session, parameter[]].get_item, parameter[name[real_question_id]]]] | keyword[def] identifier[_get_item] ( identifier[self] , identifier[question_id] ):
literal[string]
identifier[question_map] = identifier[self] . identifier[_get_question_map] ( identifier[question_id] )
identifier[real_question_id] = identifier[Id] ( identifier[question_map] [ literal[string] ])
keyword[return] identifier[self] . identifier[_get_item_lookup_session] (). identifier[get_item] ( identifier[real_question_id] ) | def _get_item(self, question_id):
"""we need a middle-man method to convert the unique "assessment-session"
authority question_ids into "real" itemIds
BUT this also has to return the "magic" item, so we can't rely
on
question = self.get_question(question_id)
ils = self._get_item_lookup_session()
return ils.get_item(Id(question._my_map['itemId']))
"""
question_map = self._get_question_map(question_id) # Throws NotFound()
real_question_id = Id(question_map['questionId'])
return self._get_item_lookup_session().get_item(real_question_id) |
def UnpackItems(*items, fields=None, defaults=None):
"""
>>> UnpackItems(0)
:param items:
:param fields:
:param defaults:
:return: callable
"""
defaults = defaults or {}
@use_context
@use_raw_input
def _UnpackItems(context, bag):
nonlocal fields, items, defaults
if fields is None:
fields = ()
for item in items:
fields += tuple(bag[item].keys())
context.set_output_fields(fields)
values = ()
for item in items:
values += tuple(bag[item].get(field, defaults.get(field)) for field in fields)
return values
return _UnpackItems | def function[UnpackItems, parameter[]]:
constant[
>>> UnpackItems(0)
:param items:
:param fields:
:param defaults:
:return: callable
]
variable[defaults] assign[=] <ast.BoolOp object at 0x7da20c990100>
def function[_UnpackItems, parameter[context, bag]]:
<ast.Nonlocal object at 0x7da20c990ca0>
if compare[name[fields] is constant[None]] begin[:]
variable[fields] assign[=] tuple[[]]
for taget[name[item]] in starred[name[items]] begin[:]
<ast.AugAssign object at 0x7da20c990670>
call[name[context].set_output_fields, parameter[name[fields]]]
variable[values] assign[=] tuple[[]]
for taget[name[item]] in starred[name[items]] begin[:]
<ast.AugAssign object at 0x7da20c6aaa10>
return[name[values]]
return[name[_UnpackItems]] | keyword[def] identifier[UnpackItems] (* identifier[items] , identifier[fields] = keyword[None] , identifier[defaults] = keyword[None] ):
literal[string]
identifier[defaults] = identifier[defaults] keyword[or] {}
@ identifier[use_context]
@ identifier[use_raw_input]
keyword[def] identifier[_UnpackItems] ( identifier[context] , identifier[bag] ):
keyword[nonlocal] identifier[fields] , identifier[items] , identifier[defaults]
keyword[if] identifier[fields] keyword[is] keyword[None] :
identifier[fields] =()
keyword[for] identifier[item] keyword[in] identifier[items] :
identifier[fields] += identifier[tuple] ( identifier[bag] [ identifier[item] ]. identifier[keys] ())
identifier[context] . identifier[set_output_fields] ( identifier[fields] )
identifier[values] =()
keyword[for] identifier[item] keyword[in] identifier[items] :
identifier[values] += identifier[tuple] ( identifier[bag] [ identifier[item] ]. identifier[get] ( identifier[field] , identifier[defaults] . identifier[get] ( identifier[field] )) keyword[for] identifier[field] keyword[in] identifier[fields] )
keyword[return] identifier[values]
keyword[return] identifier[_UnpackItems] | def UnpackItems(*items, fields=None, defaults=None):
"""
>>> UnpackItems(0)
:param items:
:param fields:
:param defaults:
:return: callable
"""
defaults = defaults or {}
@use_context
@use_raw_input
def _UnpackItems(context, bag):
nonlocal fields, items, defaults
if fields is None:
fields = ()
for item in items:
fields += tuple(bag[item].keys()) # depends on [control=['for'], data=['item']]
context.set_output_fields(fields) # depends on [control=['if'], data=['fields']]
values = ()
for item in items:
values += tuple((bag[item].get(field, defaults.get(field)) for field in fields)) # depends on [control=['for'], data=['item']]
return values
return _UnpackItems |
def install(self, connection, partition, table_name=None, index_columns=None, materialize=False,
logger=None):
""" Installs partition's mpr to the database to allow to execute sql queries over mpr.
Args:
connection:
partition (orm.Partition):
materialize (boolean): if True, create generic table. If False create MED over mpr.
Returns:
str: name of the created table.
"""
raise NotImplementedError | def function[install, parameter[self, connection, partition, table_name, index_columns, materialize, logger]]:
constant[ Installs partition's mpr to the database to allow to execute sql queries over mpr.
Args:
connection:
partition (orm.Partition):
materialize (boolean): if True, create generic table. If False create MED over mpr.
Returns:
str: name of the created table.
]
<ast.Raise object at 0x7da18c4cf6d0> | keyword[def] identifier[install] ( identifier[self] , identifier[connection] , identifier[partition] , identifier[table_name] = keyword[None] , identifier[index_columns] = keyword[None] , identifier[materialize] = keyword[False] ,
identifier[logger] = keyword[None] ):
literal[string]
keyword[raise] identifier[NotImplementedError] | def install(self, connection, partition, table_name=None, index_columns=None, materialize=False, logger=None):
""" Installs partition's mpr to the database to allow to execute sql queries over mpr.
Args:
connection:
partition (orm.Partition):
materialize (boolean): if True, create generic table. If False create MED over mpr.
Returns:
str: name of the created table.
"""
raise NotImplementedError |
def _rendered_size(text, point_size, font_file):
"""
Return a (width, height) pair representing the size of *text* in English
Metric Units (EMU) when rendered at *point_size* in the font defined in
*font_file*.
"""
emu_per_inch = 914400
px_per_inch = 72.0
font = _Fonts.font(font_file, point_size)
px_width, px_height = font.getsize(text)
emu_width = int(px_width / px_per_inch * emu_per_inch)
emu_height = int(px_height / px_per_inch * emu_per_inch)
return emu_width, emu_height | def function[_rendered_size, parameter[text, point_size, font_file]]:
constant[
Return a (width, height) pair representing the size of *text* in English
Metric Units (EMU) when rendered at *point_size* in the font defined in
*font_file*.
]
variable[emu_per_inch] assign[=] constant[914400]
variable[px_per_inch] assign[=] constant[72.0]
variable[font] assign[=] call[name[_Fonts].font, parameter[name[font_file], name[point_size]]]
<ast.Tuple object at 0x7da20c76c640> assign[=] call[name[font].getsize, parameter[name[text]]]
variable[emu_width] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[px_width] / name[px_per_inch]] * name[emu_per_inch]]]]
variable[emu_height] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[px_height] / name[px_per_inch]] * name[emu_per_inch]]]]
return[tuple[[<ast.Name object at 0x7da2049614b0>, <ast.Name object at 0x7da204962650>]]] | keyword[def] identifier[_rendered_size] ( identifier[text] , identifier[point_size] , identifier[font_file] ):
literal[string]
identifier[emu_per_inch] = literal[int]
identifier[px_per_inch] = literal[int]
identifier[font] = identifier[_Fonts] . identifier[font] ( identifier[font_file] , identifier[point_size] )
identifier[px_width] , identifier[px_height] = identifier[font] . identifier[getsize] ( identifier[text] )
identifier[emu_width] = identifier[int] ( identifier[px_width] / identifier[px_per_inch] * identifier[emu_per_inch] )
identifier[emu_height] = identifier[int] ( identifier[px_height] / identifier[px_per_inch] * identifier[emu_per_inch] )
keyword[return] identifier[emu_width] , identifier[emu_height] | def _rendered_size(text, point_size, font_file):
"""
Return a (width, height) pair representing the size of *text* in English
Metric Units (EMU) when rendered at *point_size* in the font defined in
*font_file*.
"""
emu_per_inch = 914400
px_per_inch = 72.0
font = _Fonts.font(font_file, point_size)
(px_width, px_height) = font.getsize(text)
emu_width = int(px_width / px_per_inch * emu_per_inch)
emu_height = int(px_height / px_per_inch * emu_per_inch)
return (emu_width, emu_height) |
def next_day_low_limit(self):
"明日跌停价"
return self.groupby(level=1).close.apply(lambda x: round((x + 0.0002)*0.9, 2)).sort_index() | def function[next_day_low_limit, parameter[self]]:
constant[明日跌停价]
return[call[call[call[name[self].groupby, parameter[]].close.apply, parameter[<ast.Lambda object at 0x7da1b2005f90>]].sort_index, parameter[]]] | keyword[def] identifier[next_day_low_limit] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[groupby] ( identifier[level] = literal[int] ). identifier[close] . identifier[apply] ( keyword[lambda] identifier[x] : identifier[round] (( identifier[x] + literal[int] )* literal[int] , literal[int] )). identifier[sort_index] () | def next_day_low_limit(self):
"""明日跌停价"""
return self.groupby(level=1).close.apply(lambda x: round((x + 0.0002) * 0.9, 2)).sort_index() |
def include(self, node):
"""Include the defined yaml file."""
result = None
if isinstance(node, ScalarNode):
result = Loader.include_file(self.construct_scalar(node))
else:
raise RuntimeError("Not supported !include on type %s" % type(node))
return result | def function[include, parameter[self, node]]:
constant[Include the defined yaml file.]
variable[result] assign[=] constant[None]
if call[name[isinstance], parameter[name[node], name[ScalarNode]]] begin[:]
variable[result] assign[=] call[name[Loader].include_file, parameter[call[name[self].construct_scalar, parameter[name[node]]]]]
return[name[result]] | keyword[def] identifier[include] ( identifier[self] , identifier[node] ):
literal[string]
identifier[result] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[node] , identifier[ScalarNode] ):
identifier[result] = identifier[Loader] . identifier[include_file] ( identifier[self] . identifier[construct_scalar] ( identifier[node] ))
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[type] ( identifier[node] ))
keyword[return] identifier[result] | def include(self, node):
"""Include the defined yaml file."""
result = None
if isinstance(node, ScalarNode):
result = Loader.include_file(self.construct_scalar(node)) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Not supported !include on type %s' % type(node))
return result |
def _change_iscsi_target_settings(self, iscsi_info):
"""Change iSCSI target settings.
:param iscsi_info: A dictionary that contains information of iSCSI
target like target_name, lun, ip_address, port etc.
:raises: IloError, on an error from iLO.
"""
sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
try:
pci_settings_map = (
sushy_system.bios_settings.bios_mappings.pci_settings_mappings)
nics = []
for mapping in pci_settings_map:
for subinstance in mapping['Subinstances']:
for association in subinstance['Associations']:
if 'NicBoot' in association:
nics.append(association)
except sushy.exceptions.SushyError as e:
msg = (self._('The Redfish controller failed to get the '
'bios mappings. Error %(error)s')
% {'error': str(e)})
LOG.debug(msg)
raise exception.IloError(msg)
if not nics:
msg = ('No nics were found on the system')
raise exception.IloError(msg)
# Set iSCSI info to all nics
iscsi_infos = []
for nic in nics:
data = iscsi_info.copy()
data['iSCSIAttemptName'] = nic
data['iSCSINicSource'] = nic
data['iSCSIAttemptInstance'] = nics.index(nic) + 1
iscsi_infos.append(data)
iscsi_data = {'iSCSISources': iscsi_infos}
try:
(sushy_system.bios_settings.iscsi_resource.
iscsi_settings.update_iscsi_settings(iscsi_data))
except sushy.exceptions.SushyError as e:
msg = (self._("The Redfish controller is failed to update iSCSI "
"settings. Error %(error)s") %
{'error': str(e)})
LOG.debug(msg)
raise exception.IloError(msg) | def function[_change_iscsi_target_settings, parameter[self, iscsi_info]]:
constant[Change iSCSI target settings.
:param iscsi_info: A dictionary that contains information of iSCSI
target like target_name, lun, ip_address, port etc.
:raises: IloError, on an error from iLO.
]
variable[sushy_system] assign[=] call[name[self]._get_sushy_system, parameter[name[PROLIANT_SYSTEM_ID]]]
<ast.Try object at 0x7da1b1991c90>
if <ast.UnaryOp object at 0x7da1b19921d0> begin[:]
variable[msg] assign[=] constant[No nics were found on the system]
<ast.Raise object at 0x7da1b1991000>
variable[iscsi_infos] assign[=] list[[]]
for taget[name[nic]] in starred[name[nics]] begin[:]
variable[data] assign[=] call[name[iscsi_info].copy, parameter[]]
call[name[data]][constant[iSCSIAttemptName]] assign[=] name[nic]
call[name[data]][constant[iSCSINicSource]] assign[=] name[nic]
call[name[data]][constant[iSCSIAttemptInstance]] assign[=] binary_operation[call[name[nics].index, parameter[name[nic]]] + constant[1]]
call[name[iscsi_infos].append, parameter[name[data]]]
variable[iscsi_data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1991690>], [<ast.Name object at 0x7da1b1993a30>]]
<ast.Try object at 0x7da1b1991bd0> | keyword[def] identifier[_change_iscsi_target_settings] ( identifier[self] , identifier[iscsi_info] ):
literal[string]
identifier[sushy_system] = identifier[self] . identifier[_get_sushy_system] ( identifier[PROLIANT_SYSTEM_ID] )
keyword[try] :
identifier[pci_settings_map] =(
identifier[sushy_system] . identifier[bios_settings] . identifier[bios_mappings] . identifier[pci_settings_mappings] )
identifier[nics] =[]
keyword[for] identifier[mapping] keyword[in] identifier[pci_settings_map] :
keyword[for] identifier[subinstance] keyword[in] identifier[mapping] [ literal[string] ]:
keyword[for] identifier[association] keyword[in] identifier[subinstance] [ literal[string] ]:
keyword[if] literal[string] keyword[in] identifier[association] :
identifier[nics] . identifier[append] ( identifier[association] )
keyword[except] identifier[sushy] . identifier[exceptions] . identifier[SushyError] keyword[as] identifier[e] :
identifier[msg] =( identifier[self] . identifier[_] ( literal[string]
literal[string] )
%{ literal[string] : identifier[str] ( identifier[e] )})
identifier[LOG] . identifier[debug] ( identifier[msg] )
keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] )
keyword[if] keyword[not] identifier[nics] :
identifier[msg] =( literal[string] )
keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] )
identifier[iscsi_infos] =[]
keyword[for] identifier[nic] keyword[in] identifier[nics] :
identifier[data] = identifier[iscsi_info] . identifier[copy] ()
identifier[data] [ literal[string] ]= identifier[nic]
identifier[data] [ literal[string] ]= identifier[nic]
identifier[data] [ literal[string] ]= identifier[nics] . identifier[index] ( identifier[nic] )+ literal[int]
identifier[iscsi_infos] . identifier[append] ( identifier[data] )
identifier[iscsi_data] ={ literal[string] : identifier[iscsi_infos] }
keyword[try] :
( identifier[sushy_system] . identifier[bios_settings] . identifier[iscsi_resource] .
identifier[iscsi_settings] . identifier[update_iscsi_settings] ( identifier[iscsi_data] ))
keyword[except] identifier[sushy] . identifier[exceptions] . identifier[SushyError] keyword[as] identifier[e] :
identifier[msg] =( identifier[self] . identifier[_] ( literal[string]
literal[string] )%
{ literal[string] : identifier[str] ( identifier[e] )})
identifier[LOG] . identifier[debug] ( identifier[msg] )
keyword[raise] identifier[exception] . identifier[IloError] ( identifier[msg] ) | def _change_iscsi_target_settings(self, iscsi_info):
"""Change iSCSI target settings.
:param iscsi_info: A dictionary that contains information of iSCSI
target like target_name, lun, ip_address, port etc.
:raises: IloError, on an error from iLO.
"""
sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
try:
pci_settings_map = sushy_system.bios_settings.bios_mappings.pci_settings_mappings
nics = []
for mapping in pci_settings_map:
for subinstance in mapping['Subinstances']:
for association in subinstance['Associations']:
if 'NicBoot' in association:
nics.append(association) # depends on [control=['if'], data=['association']] # depends on [control=['for'], data=['association']] # depends on [control=['for'], data=['subinstance']] # depends on [control=['for'], data=['mapping']] # depends on [control=['try'], data=[]]
except sushy.exceptions.SushyError as e:
msg = self._('The Redfish controller failed to get the bios mappings. Error %(error)s') % {'error': str(e)}
LOG.debug(msg)
raise exception.IloError(msg) # depends on [control=['except'], data=['e']]
if not nics:
msg = 'No nics were found on the system'
raise exception.IloError(msg) # depends on [control=['if'], data=[]]
# Set iSCSI info to all nics
iscsi_infos = []
for nic in nics:
data = iscsi_info.copy()
data['iSCSIAttemptName'] = nic
data['iSCSINicSource'] = nic
data['iSCSIAttemptInstance'] = nics.index(nic) + 1
iscsi_infos.append(data) # depends on [control=['for'], data=['nic']]
iscsi_data = {'iSCSISources': iscsi_infos}
try:
sushy_system.bios_settings.iscsi_resource.iscsi_settings.update_iscsi_settings(iscsi_data) # depends on [control=['try'], data=[]]
except sushy.exceptions.SushyError as e:
msg = self._('The Redfish controller is failed to update iSCSI settings. Error %(error)s') % {'error': str(e)}
LOG.debug(msg)
raise exception.IloError(msg) # depends on [control=['except'], data=['e']] |
def w(self, units=None):
"""
This returns a single array containing the phase-space positions.
Parameters
----------
units : `~gala.units.UnitSystem` (optional)
The unit system to represent the position and velocity in
before combining into the full array.
Returns
-------
w : `~numpy.ndarray`
A numpy array of all positions and velocities, without units.
Will have shape ``(2*ndim,...)``.
"""
if units is None:
if self.hamiltonian is None:
units = dimensionless
else:
units = self.hamiltonian.units
return super(Orbit, self).w(units=units) | def function[w, parameter[self, units]]:
constant[
This returns a single array containing the phase-space positions.
Parameters
----------
units : `~gala.units.UnitSystem` (optional)
The unit system to represent the position and velocity in
before combining into the full array.
Returns
-------
w : `~numpy.ndarray`
A numpy array of all positions and velocities, without units.
Will have shape ``(2*ndim,...)``.
]
if compare[name[units] is constant[None]] begin[:]
if compare[name[self].hamiltonian is constant[None]] begin[:]
variable[units] assign[=] name[dimensionless]
return[call[call[name[super], parameter[name[Orbit], name[self]]].w, parameter[]]] | keyword[def] identifier[w] ( identifier[self] , identifier[units] = keyword[None] ):
literal[string]
keyword[if] identifier[units] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[hamiltonian] keyword[is] keyword[None] :
identifier[units] = identifier[dimensionless]
keyword[else] :
identifier[units] = identifier[self] . identifier[hamiltonian] . identifier[units]
keyword[return] identifier[super] ( identifier[Orbit] , identifier[self] ). identifier[w] ( identifier[units] = identifier[units] ) | def w(self, units=None):
"""
This returns a single array containing the phase-space positions.
Parameters
----------
units : `~gala.units.UnitSystem` (optional)
The unit system to represent the position and velocity in
before combining into the full array.
Returns
-------
w : `~numpy.ndarray`
A numpy array of all positions and velocities, without units.
Will have shape ``(2*ndim,...)``.
"""
if units is None:
if self.hamiltonian is None:
units = dimensionless # depends on [control=['if'], data=[]]
else:
units = self.hamiltonian.units # depends on [control=['if'], data=['units']]
return super(Orbit, self).w(units=units) |
def union_overlapping(intervals):
"""Union any overlapping intervals in the given set."""
disjoint_intervals = []
for interval in intervals:
if disjoint_intervals and disjoint_intervals[-1].overlaps(interval):
disjoint_intervals[-1] = disjoint_intervals[-1].union(interval)
else:
disjoint_intervals.append(interval)
return disjoint_intervals | def function[union_overlapping, parameter[intervals]]:
constant[Union any overlapping intervals in the given set.]
variable[disjoint_intervals] assign[=] list[[]]
for taget[name[interval]] in starred[name[intervals]] begin[:]
if <ast.BoolOp object at 0x7da1b0625cc0> begin[:]
call[name[disjoint_intervals]][<ast.UnaryOp object at 0x7da1b06268f0>] assign[=] call[call[name[disjoint_intervals]][<ast.UnaryOp object at 0x7da1b0627a60>].union, parameter[name[interval]]]
return[name[disjoint_intervals]] | keyword[def] identifier[union_overlapping] ( identifier[intervals] ):
literal[string]
identifier[disjoint_intervals] =[]
keyword[for] identifier[interval] keyword[in] identifier[intervals] :
keyword[if] identifier[disjoint_intervals] keyword[and] identifier[disjoint_intervals] [- literal[int] ]. identifier[overlaps] ( identifier[interval] ):
identifier[disjoint_intervals] [- literal[int] ]= identifier[disjoint_intervals] [- literal[int] ]. identifier[union] ( identifier[interval] )
keyword[else] :
identifier[disjoint_intervals] . identifier[append] ( identifier[interval] )
keyword[return] identifier[disjoint_intervals] | def union_overlapping(intervals):
"""Union any overlapping intervals in the given set."""
disjoint_intervals = []
for interval in intervals:
if disjoint_intervals and disjoint_intervals[-1].overlaps(interval):
disjoint_intervals[-1] = disjoint_intervals[-1].union(interval) # depends on [control=['if'], data=[]]
else:
disjoint_intervals.append(interval) # depends on [control=['for'], data=['interval']]
return disjoint_intervals |
def sync(self):
"""
Synchronise the settings. This means that the pixel start
values are shifted downwards so that they are synchronised
with a full-frame binned version. This does nothing if the
binning factors == 1.
"""
# needs some mods for ultracam ??
xbin = self.xbin.value()
ybin = self.ybin.value()
n = 0
for xsl, xsr, ys, nx, ny in self:
if xbin > 1:
xsl = xbin*((xsl-1)//xbin)+1
self.xsl[n].set(xsl)
xsr = xbin*((xsr-1025)//xbin)+1025
self.xsr[n].set(xsr)
if ybin > 1:
ys = ybin*((ys-1)//ybin)+1
self.ys[n].set(ys)
n += 1
g = get_root(self).globals
self.sbutt.config(bg=g.COL['main'])
self.sbutt.config(state='disable') | def function[sync, parameter[self]]:
constant[
Synchronise the settings. This means that the pixel start
values are shifted downwards so that they are synchronised
with a full-frame binned version. This does nothing if the
binning factors == 1.
]
variable[xbin] assign[=] call[name[self].xbin.value, parameter[]]
variable[ybin] assign[=] call[name[self].ybin.value, parameter[]]
variable[n] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da18eb55ae0>, <ast.Name object at 0x7da18eb564a0>, <ast.Name object at 0x7da18eb572b0>, <ast.Name object at 0x7da18eb559f0>, <ast.Name object at 0x7da18eb55c60>]]] in starred[name[self]] begin[:]
if compare[name[xbin] greater[>] constant[1]] begin[:]
variable[xsl] assign[=] binary_operation[binary_operation[name[xbin] * binary_operation[binary_operation[name[xsl] - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> name[xbin]]] + constant[1]]
call[call[name[self].xsl][name[n]].set, parameter[name[xsl]]]
variable[xsr] assign[=] binary_operation[binary_operation[name[xbin] * binary_operation[binary_operation[name[xsr] - constant[1025]] <ast.FloorDiv object at 0x7da2590d6bc0> name[xbin]]] + constant[1025]]
call[call[name[self].xsr][name[n]].set, parameter[name[xsr]]]
if compare[name[ybin] greater[>] constant[1]] begin[:]
variable[ys] assign[=] binary_operation[binary_operation[name[ybin] * binary_operation[binary_operation[name[ys] - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> name[ybin]]] + constant[1]]
call[call[name[self].ys][name[n]].set, parameter[name[ys]]]
<ast.AugAssign object at 0x7da18eb54c40>
variable[g] assign[=] call[name[get_root], parameter[name[self]]].globals
call[name[self].sbutt.config, parameter[]]
call[name[self].sbutt.config, parameter[]] | keyword[def] identifier[sync] ( identifier[self] ):
literal[string]
identifier[xbin] = identifier[self] . identifier[xbin] . identifier[value] ()
identifier[ybin] = identifier[self] . identifier[ybin] . identifier[value] ()
identifier[n] = literal[int]
keyword[for] identifier[xsl] , identifier[xsr] , identifier[ys] , identifier[nx] , identifier[ny] keyword[in] identifier[self] :
keyword[if] identifier[xbin] > literal[int] :
identifier[xsl] = identifier[xbin] *(( identifier[xsl] - literal[int] )// identifier[xbin] )+ literal[int]
identifier[self] . identifier[xsl] [ identifier[n] ]. identifier[set] ( identifier[xsl] )
identifier[xsr] = identifier[xbin] *(( identifier[xsr] - literal[int] )// identifier[xbin] )+ literal[int]
identifier[self] . identifier[xsr] [ identifier[n] ]. identifier[set] ( identifier[xsr] )
keyword[if] identifier[ybin] > literal[int] :
identifier[ys] = identifier[ybin] *(( identifier[ys] - literal[int] )// identifier[ybin] )+ literal[int]
identifier[self] . identifier[ys] [ identifier[n] ]. identifier[set] ( identifier[ys] )
identifier[n] += literal[int]
identifier[g] = identifier[get_root] ( identifier[self] ). identifier[globals]
identifier[self] . identifier[sbutt] . identifier[config] ( identifier[bg] = identifier[g] . identifier[COL] [ literal[string] ])
identifier[self] . identifier[sbutt] . identifier[config] ( identifier[state] = literal[string] ) | def sync(self):
"""
Synchronise the settings. This means that the pixel start
values are shifted downwards so that they are synchronised
with a full-frame binned version. This does nothing if the
binning factors == 1.
"""
# needs some mods for ultracam ??
xbin = self.xbin.value()
ybin = self.ybin.value()
n = 0
for (xsl, xsr, ys, nx, ny) in self:
if xbin > 1:
xsl = xbin * ((xsl - 1) // xbin) + 1
self.xsl[n].set(xsl)
xsr = xbin * ((xsr - 1025) // xbin) + 1025
self.xsr[n].set(xsr) # depends on [control=['if'], data=['xbin']]
if ybin > 1:
ys = ybin * ((ys - 1) // ybin) + 1
self.ys[n].set(ys) # depends on [control=['if'], data=['ybin']]
n += 1 # depends on [control=['for'], data=[]]
g = get_root(self).globals
self.sbutt.config(bg=g.COL['main'])
self.sbutt.config(state='disable') |
def saccade_model_em(pointlist):
'''
Estimates the reaction time and duration of the saccade by
fitting a saccade model to the data.
The model consists of three phases:
1) source phase, gaze is fixated onto a point
2) saccade phase, gaze moves steadily from the source point
onto the target point
3) target phase, gaze becomes fixated onto a point.
The estimation is done in Expectation-Maximation manner:
1) Initial locations are given for the source and target points.
2) Expectation: given the source and target points, saccade start
and end times are calculated and the gazepoints are divided
into three classes: source, saccade, and target gazepoints.
In EM terminology, the classes are the latent variables.
3) Maximization: the means of the new source and target gazepoints
become the new values of the source and target points.
4) Repeat steps 2) and 3) until the source and target points stay
the same.
Input arguments
pointlist, list of [x, y] points. 'None' values are not allowed.
Output arguments
source_points
saccade_points
target_points
mean_squared_error
Here we use two different concepts, times and indices:
Time t 0 1 2 3 4 5
| | | | | |
Vector [ 2 3 1 2 1 ]
| | | | |
Index i 0 1 2 3 4
'''
# Aliases
g = pointlist
# Max
max_t = len(g)
max_i = max_t - 1
# Initialize
mu_s = g[0] # First
mu_t = g[-1] # Last
t_start = min(max_t, 60) # Average SRT is about 200 ms
t_end = min(max_t, 70) # Average SD is about 30 ms
# To detect nonconvergent situations, memorize the visited t_start and
# t_end pairs and their model error.
t_history = TimePairValueHistory()
# Limit iterations in case there is a bug
max_iters = 50
em_iters = 0
for _ in range(max_iters):
t_start_hat, t_end_hat, mse, src_sse, sacc_sse, tgt_sse = saccade_model_mle(g, mu_s, mu_t, t_start, t_end)
if t_end_hat < t_start_hat:
raise Exception('t_end_hat < t_start_hat: ' + str(t_end_hat) + ',' + str(t_start_hat))
# Determine new centroids.
# Limit times so that there is at least one gazepoint.
t_start_c = min(max(t_start_hat, 1), max_t - 1)
t_end_c = min(max(t_end_hat , 1), max_t - 1)
# Compute means based on windows of 100 ms before and after saccade
g_source = select_points_time_to_time(g, 0, t_start_c)
g_target = select_points_time_to_time(g, t_end_c, max_t)
g_source30 = select_last_points(g_source, 30)
g_target30 = select_first_points(g_target, 30)
mu_s_hat = mean_point(g_source30)
mu_t_hat = mean_point(g_target30)
mu_s = mu_s_hat
mu_t = mu_t_hat
t_start = t_start_hat
t_end = t_end_hat
# Compute until we have arrived to same state again.
if not t_history.is_visited(t_start_hat, t_end_hat):
t_history.visit(t_start, t_end, mse, {
'src_sse': src_sse,
'sacc_sse': sacc_sse,
'tgt_sse': tgt_sse,
})
# The next round either is minimal again or goes here.
em_iters += 1
#print('t_start: ' + str(t_start))
#print('t_end: ' + str(t_end))
#print('mse: ' + str(mse))
else:
# Select the parameters that gave minimum error
t_start, t_end, mse, d = t_history.get_minimum()
src_sse = d['src_sse']
sacc_sse = d['sacc_sse']
tgt_sse = d['tgt_sse']
break
if em_iters == max_iters:
did_converge = False
else:
did_converge = True
source_points = select_points_time_to_time(g, 0, t_start)
saccade_points = select_points_time_to_time(g, t_start, t_end)
target_points = select_points_time_to_time(g, t_end, None)
mean_squared_error = mse
return source_points, saccade_points, target_points, mean_squared_error | def function[saccade_model_em, parameter[pointlist]]:
constant[
Estimates the reaction time and duration of the saccade by
fitting a saccade model to the data.
The model consists of three phases:
1) source phase, gaze is fixated onto a point
2) saccade phase, gaze moves steadily from the source point
onto the target point
3) target phase, gaze becomes fixated onto a point.
The estimation is done in Expectation-Maximation manner:
1) Initial locations are given for the source and target points.
2) Expectation: given the source and target points, saccade start
and end times are calculated and the gazepoints are divided
into three classes: source, saccade, and target gazepoints.
In EM terminology, the classes are the latent variables.
3) Maximization: the means of the new source and target gazepoints
become the new values of the source and target points.
4) Repeat steps 2) and 3) until the source and target points stay
the same.
Input arguments
pointlist, list of [x, y] points. 'None' values are not allowed.
Output arguments
source_points
saccade_points
target_points
mean_squared_error
Here we use two different concepts, times and indices:
Time t 0 1 2 3 4 5
| | | | | |
Vector [ 2 3 1 2 1 ]
| | | | |
Index i 0 1 2 3 4
]
variable[g] assign[=] name[pointlist]
variable[max_t] assign[=] call[name[len], parameter[name[g]]]
variable[max_i] assign[=] binary_operation[name[max_t] - constant[1]]
variable[mu_s] assign[=] call[name[g]][constant[0]]
variable[mu_t] assign[=] call[name[g]][<ast.UnaryOp object at 0x7da1b09e8fa0>]
variable[t_start] assign[=] call[name[min], parameter[name[max_t], constant[60]]]
variable[t_end] assign[=] call[name[min], parameter[name[max_t], constant[70]]]
variable[t_history] assign[=] call[name[TimePairValueHistory], parameter[]]
variable[max_iters] assign[=] constant[50]
variable[em_iters] assign[=] constant[0]
for taget[name[_]] in starred[call[name[range], parameter[name[max_iters]]]] begin[:]
<ast.Tuple object at 0x7da1b09ea650> assign[=] call[name[saccade_model_mle], parameter[name[g], name[mu_s], name[mu_t], name[t_start], name[t_end]]]
if compare[name[t_end_hat] less[<] name[t_start_hat]] begin[:]
<ast.Raise object at 0x7da1b09eb070>
variable[t_start_c] assign[=] call[name[min], parameter[call[name[max], parameter[name[t_start_hat], constant[1]]], binary_operation[name[max_t] - constant[1]]]]
variable[t_end_c] assign[=] call[name[min], parameter[call[name[max], parameter[name[t_end_hat], constant[1]]], binary_operation[name[max_t] - constant[1]]]]
variable[g_source] assign[=] call[name[select_points_time_to_time], parameter[name[g], constant[0], name[t_start_c]]]
variable[g_target] assign[=] call[name[select_points_time_to_time], parameter[name[g], name[t_end_c], name[max_t]]]
variable[g_source30] assign[=] call[name[select_last_points], parameter[name[g_source], constant[30]]]
variable[g_target30] assign[=] call[name[select_first_points], parameter[name[g_target], constant[30]]]
variable[mu_s_hat] assign[=] call[name[mean_point], parameter[name[g_source30]]]
variable[mu_t_hat] assign[=] call[name[mean_point], parameter[name[g_target30]]]
variable[mu_s] assign[=] name[mu_s_hat]
variable[mu_t] assign[=] name[mu_t_hat]
variable[t_start] assign[=] name[t_start_hat]
variable[t_end] assign[=] name[t_end_hat]
if <ast.UnaryOp object at 0x7da207f98340> begin[:]
call[name[t_history].visit, parameter[name[t_start], name[t_end], name[mse], dictionary[[<ast.Constant object at 0x7da207f98220>, <ast.Constant object at 0x7da207f9a800>, <ast.Constant object at 0x7da207f98790>], [<ast.Name object at 0x7da207f99330>, <ast.Name object at 0x7da207f9b8b0>, <ast.Name object at 0x7da207f9aa10>]]]]
<ast.AugAssign object at 0x7da207f99270>
if compare[name[em_iters] equal[==] name[max_iters]] begin[:]
variable[did_converge] assign[=] constant[False]
variable[source_points] assign[=] call[name[select_points_time_to_time], parameter[name[g], constant[0], name[t_start]]]
variable[saccade_points] assign[=] call[name[select_points_time_to_time], parameter[name[g], name[t_start], name[t_end]]]
variable[target_points] assign[=] call[name[select_points_time_to_time], parameter[name[g], name[t_end], constant[None]]]
variable[mean_squared_error] assign[=] name[mse]
return[tuple[[<ast.Name object at 0x7da207f9a4a0>, <ast.Name object at 0x7da207f98f70>, <ast.Name object at 0x7da207f99cf0>, <ast.Name object at 0x7da207f9a4d0>]]] | keyword[def] identifier[saccade_model_em] ( identifier[pointlist] ):
literal[string]
identifier[g] = identifier[pointlist]
identifier[max_t] = identifier[len] ( identifier[g] )
identifier[max_i] = identifier[max_t] - literal[int]
identifier[mu_s] = identifier[g] [ literal[int] ]
identifier[mu_t] = identifier[g] [- literal[int] ]
identifier[t_start] = identifier[min] ( identifier[max_t] , literal[int] )
identifier[t_end] = identifier[min] ( identifier[max_t] , literal[int] )
identifier[t_history] = identifier[TimePairValueHistory] ()
identifier[max_iters] = literal[int]
identifier[em_iters] = literal[int]
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[max_iters] ):
identifier[t_start_hat] , identifier[t_end_hat] , identifier[mse] , identifier[src_sse] , identifier[sacc_sse] , identifier[tgt_sse] = identifier[saccade_model_mle] ( identifier[g] , identifier[mu_s] , identifier[mu_t] , identifier[t_start] , identifier[t_end] )
keyword[if] identifier[t_end_hat] < identifier[t_start_hat] :
keyword[raise] identifier[Exception] ( literal[string] + identifier[str] ( identifier[t_end_hat] )+ literal[string] + identifier[str] ( identifier[t_start_hat] ))
identifier[t_start_c] = identifier[min] ( identifier[max] ( identifier[t_start_hat] , literal[int] ), identifier[max_t] - literal[int] )
identifier[t_end_c] = identifier[min] ( identifier[max] ( identifier[t_end_hat] , literal[int] ), identifier[max_t] - literal[int] )
identifier[g_source] = identifier[select_points_time_to_time] ( identifier[g] , literal[int] , identifier[t_start_c] )
identifier[g_target] = identifier[select_points_time_to_time] ( identifier[g] , identifier[t_end_c] , identifier[max_t] )
identifier[g_source30] = identifier[select_last_points] ( identifier[g_source] , literal[int] )
identifier[g_target30] = identifier[select_first_points] ( identifier[g_target] , literal[int] )
identifier[mu_s_hat] = identifier[mean_point] ( identifier[g_source30] )
identifier[mu_t_hat] = identifier[mean_point] ( identifier[g_target30] )
identifier[mu_s] = identifier[mu_s_hat]
identifier[mu_t] = identifier[mu_t_hat]
identifier[t_start] = identifier[t_start_hat]
identifier[t_end] = identifier[t_end_hat]
keyword[if] keyword[not] identifier[t_history] . identifier[is_visited] ( identifier[t_start_hat] , identifier[t_end_hat] ):
identifier[t_history] . identifier[visit] ( identifier[t_start] , identifier[t_end] , identifier[mse] ,{
literal[string] : identifier[src_sse] ,
literal[string] : identifier[sacc_sse] ,
literal[string] : identifier[tgt_sse] ,
})
identifier[em_iters] += literal[int]
keyword[else] :
identifier[t_start] , identifier[t_end] , identifier[mse] , identifier[d] = identifier[t_history] . identifier[get_minimum] ()
identifier[src_sse] = identifier[d] [ literal[string] ]
identifier[sacc_sse] = identifier[d] [ literal[string] ]
identifier[tgt_sse] = identifier[d] [ literal[string] ]
keyword[break]
keyword[if] identifier[em_iters] == identifier[max_iters] :
identifier[did_converge] = keyword[False]
keyword[else] :
identifier[did_converge] = keyword[True]
identifier[source_points] = identifier[select_points_time_to_time] ( identifier[g] , literal[int] , identifier[t_start] )
identifier[saccade_points] = identifier[select_points_time_to_time] ( identifier[g] , identifier[t_start] , identifier[t_end] )
identifier[target_points] = identifier[select_points_time_to_time] ( identifier[g] , identifier[t_end] , keyword[None] )
identifier[mean_squared_error] = identifier[mse]
keyword[return] identifier[source_points] , identifier[saccade_points] , identifier[target_points] , identifier[mean_squared_error] | def saccade_model_em(pointlist):
"""
Estimates the reaction time and duration of the saccade by
fitting a saccade model to the data.
The model consists of three phases:
1) source phase, gaze is fixated onto a point
2) saccade phase, gaze moves steadily from the source point
onto the target point
3) target phase, gaze becomes fixated onto a point.
The estimation is done in Expectation-Maximation manner:
1) Initial locations are given for the source and target points.
2) Expectation: given the source and target points, saccade start
and end times are calculated and the gazepoints are divided
into three classes: source, saccade, and target gazepoints.
In EM terminology, the classes are the latent variables.
3) Maximization: the means of the new source and target gazepoints
become the new values of the source and target points.
4) Repeat steps 2) and 3) until the source and target points stay
the same.
Input arguments
pointlist, list of [x, y] points. 'None' values are not allowed.
Output arguments
source_points
saccade_points
target_points
mean_squared_error
Here we use two different concepts, times and indices:
Time t 0 1 2 3 4 5
| | | | | |
Vector [ 2 3 1 2 1 ]
| | | | |
Index i 0 1 2 3 4
"""
# Aliases
g = pointlist
# Max
max_t = len(g)
max_i = max_t - 1
# Initialize
mu_s = g[0] # First
mu_t = g[-1] # Last
t_start = min(max_t, 60) # Average SRT is about 200 ms
t_end = min(max_t, 70) # Average SD is about 30 ms
# To detect nonconvergent situations, memorize the visited t_start and
# t_end pairs and their model error.
t_history = TimePairValueHistory()
# Limit iterations in case there is a bug
max_iters = 50
em_iters = 0
for _ in range(max_iters):
(t_start_hat, t_end_hat, mse, src_sse, sacc_sse, tgt_sse) = saccade_model_mle(g, mu_s, mu_t, t_start, t_end)
if t_end_hat < t_start_hat:
raise Exception('t_end_hat < t_start_hat: ' + str(t_end_hat) + ',' + str(t_start_hat)) # depends on [control=['if'], data=['t_end_hat', 't_start_hat']]
# Determine new centroids.
# Limit times so that there is at least one gazepoint.
t_start_c = min(max(t_start_hat, 1), max_t - 1)
t_end_c = min(max(t_end_hat, 1), max_t - 1)
# Compute means based on windows of 100 ms before and after saccade
g_source = select_points_time_to_time(g, 0, t_start_c)
g_target = select_points_time_to_time(g, t_end_c, max_t)
g_source30 = select_last_points(g_source, 30)
g_target30 = select_first_points(g_target, 30)
mu_s_hat = mean_point(g_source30)
mu_t_hat = mean_point(g_target30)
mu_s = mu_s_hat
mu_t = mu_t_hat
t_start = t_start_hat
t_end = t_end_hat
# Compute until we have arrived to same state again.
if not t_history.is_visited(t_start_hat, t_end_hat):
t_history.visit(t_start, t_end, mse, {'src_sse': src_sse, 'sacc_sse': sacc_sse, 'tgt_sse': tgt_sse})
# The next round either is minimal again or goes here.
em_iters += 1 # depends on [control=['if'], data=[]]
else:
#print('t_start: ' + str(t_start))
#print('t_end: ' + str(t_end))
#print('mse: ' + str(mse))
# Select the parameters that gave minimum error
(t_start, t_end, mse, d) = t_history.get_minimum()
src_sse = d['src_sse']
sacc_sse = d['sacc_sse']
tgt_sse = d['tgt_sse']
break # depends on [control=['for'], data=[]]
if em_iters == max_iters:
did_converge = False # depends on [control=['if'], data=[]]
else:
did_converge = True
source_points = select_points_time_to_time(g, 0, t_start)
saccade_points = select_points_time_to_time(g, t_start, t_end)
target_points = select_points_time_to_time(g, t_end, None)
mean_squared_error = mse
return (source_points, saccade_points, target_points, mean_squared_error) |
def extract(ctx, dataset, kwargs):
    """Extract the files from the compressed archives."""
    # Parse the raw CLI kwarg strings into a keyword dict first,
    # then forward them to the dataset handle's extract().
    parsed_kwargs = parse_kwargs(kwargs)
    dataset_handle = data(dataset, **ctx.obj)
    dataset_handle.extract(**parsed_kwargs)
constant[extracts the files from the compressed archives]
variable[kwargs] assign[=] call[name[parse_kwargs], parameter[name[kwargs]]]
call[call[name[data], parameter[name[dataset]]].extract, parameter[]] | keyword[def] identifier[extract] ( identifier[ctx] , identifier[dataset] , identifier[kwargs] ):
literal[string]
identifier[kwargs] = identifier[parse_kwargs] ( identifier[kwargs] )
identifier[data] ( identifier[dataset] ,** identifier[ctx] . identifier[obj] ). identifier[extract] (** identifier[kwargs] ) | def extract(ctx, dataset, kwargs):
"""extracts the files from the compressed archives"""
kwargs = parse_kwargs(kwargs)
data(dataset, **ctx.obj).extract(**kwargs) |
def validate_key(self, key):
    """
    Django normally warns about maximum key length, but we error on it.
    Raises ValueError for keys longer than 250 characters instead of
    letting an over-long key through with only a warning.
    # NOTE(review): 250 matches memcached's documented key-length limit —
    # presumably why this backend enforces it; confirm against backend docs.
    """
    if len(key) > 250:
        # Fixed typo in the error message: "maxmimum" -> "maximum".
        raise ValueError(
            "Cache key is longer than the maximum 250 characters: {}"
            .format(key),
        )
    # Delegate the standard checks (warnings etc.) to the base class.
    return super(MySQLCache, self).validate_key(key)
constant[
Django normally warns about maximum key length, but we error on it.
]
if compare[call[name[len], parameter[name[key]]] greater[>] constant[250]] begin[:]
<ast.Raise object at 0x7da1b06c7250>
return[call[call[name[super], parameter[name[MySQLCache], name[self]]].validate_key, parameter[name[key]]]] | keyword[def] identifier[validate_key] ( identifier[self] , identifier[key] ):
literal[string]
keyword[if] identifier[len] ( identifier[key] )> literal[int] :
keyword[raise] identifier[ValueError] (
literal[string]
. identifier[format] ( identifier[key] ),
)
keyword[return] identifier[super] ( identifier[MySQLCache] , identifier[self] ). identifier[validate_key] ( identifier[key] ) | def validate_key(self, key):
"""
Django normally warns about maximum key length, but we error on it.
"""
if len(key) > 250:
raise ValueError('Cache key is longer than the maxmimum 250 characters: {}'.format(key)) # depends on [control=['if'], data=[]]
return super(MySQLCache, self).validate_key(key) |
def update_feed(self, feed, feed_id):
    """UpdateFeed.
    [Preview API] Change the attributes of a feed.
    :param :class:`<FeedUpdate> <azure.devops.v5_0.feed.models.FeedUpdate>` feed: A JSON object containing the feed settings to be updated.
    :param str feed_id: Name or Id of the feed.
    :rtype: :class:`<Feed> <azure.devops.v5_0.feed.models.Feed>`
    """
    # Only add the feedId route value when a feed id was supplied.
    path_params = {}
    if feed_id is not None:
        path_params['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
    # Serialize the update payload and PATCH it to the Feed Management endpoint.
    request_body = self._serialize.body(feed, 'FeedUpdate')
    response = self._send(http_method='PATCH',
                          location_id='c65009a7-474a-4ad1-8b42-7d852107ef8c',
                          version='5.0-preview.1',
                          route_values=path_params,
                          content=request_body)
    return self._deserialize('Feed', response)
constant[UpdateFeed.
[Preview API] Change the attributes of a feed.
:param :class:`<FeedUpdate> <azure.devops.v5_0.feed.models.FeedUpdate>` feed: A JSON object containing the feed settings to be updated.
:param str feed_id: Name or Id of the feed.
:rtype: :class:`<Feed> <azure.devops.v5_0.feed.models.Feed>`
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[feed_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[feedId]] assign[=] call[name[self]._serialize.url, parameter[constant[feed_id], name[feed_id], constant[str]]]
variable[content] assign[=] call[name[self]._serialize.body, parameter[name[feed], constant[FeedUpdate]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[Feed], name[response]]]] | keyword[def] identifier[update_feed] ( identifier[self] , identifier[feed] , identifier[feed_id] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[feed_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[feed_id] , literal[string] )
identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[feed] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[content] = identifier[content] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] ) | def update_feed(self, feed, feed_id):
"""UpdateFeed.
[Preview API] Change the attributes of a feed.
:param :class:`<FeedUpdate> <azure.devops.v5_0.feed.models.FeedUpdate>` feed: A JSON object containing the feed settings to be updated.
:param str feed_id: Name or Id of the feed.
:rtype: :class:`<Feed> <azure.devops.v5_0.feed.models.Feed>`
"""
route_values = {}
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str') # depends on [control=['if'], data=['feed_id']]
content = self._serialize.body(feed, 'FeedUpdate')
response = self._send(http_method='PATCH', location_id='c65009a7-474a-4ad1-8b42-7d852107ef8c', version='5.0-preview.1', route_values=route_values, content=content)
return self._deserialize('Feed', response) |
def update_reflexrules_workflow_state(portal):
    """
    Run the bika_inactive_workflow role mappings over every Reflex Rule so
    each one gets its 'inactive_state' (they don't have it by default).
    :param portal: Portal object
    :return: None
    """
    workflow_tool = getToolByName(portal, 'portal_workflow')
    logger.info("Updating Reflex Rules' 'inactive_state's...")
    inactive_workflow = workflow_tool.getWorkflowById("bika_inactive_workflow")
    catalog = api.get_tool('portal_catalog')
    # Re-apply role mappings and reindex each Reflex Rule object.
    for brain in catalog(portal_type='ReflexRule'):
        rule = brain.getObject()
        inactive_workflow.updateRoleMappingsFor(rule)
        rule.reindexObject()
    logger.info("Reflex Rules' 'inactive_state's were updated.")
constant[
Updates Reflex Rules' inactive_state, otherwise they don't have it by
default.
:param portal: Portal object
:return: None
]
variable[wf_tool] assign[=] call[name[getToolByName], parameter[name[portal], constant[portal_workflow]]]
call[name[logger].info, parameter[constant[Updating Reflex Rules' 'inactive_state's...]]]
variable[wf] assign[=] call[name[wf_tool].getWorkflowById, parameter[constant[bika_inactive_workflow]]]
variable[uc] assign[=] call[name[api].get_tool, parameter[constant[portal_catalog]]]
variable[r_rules] assign[=] call[name[uc], parameter[]]
for taget[name[rr]] in starred[name[r_rules]] begin[:]
variable[obj] assign[=] call[name[rr].getObject, parameter[]]
call[name[wf].updateRoleMappingsFor, parameter[name[obj]]]
call[name[obj].reindexObject, parameter[]]
call[name[logger].info, parameter[constant[Reflex Rules' 'inactive_state's were updated.]]] | keyword[def] identifier[update_reflexrules_workflow_state] ( identifier[portal] ):
literal[string]
identifier[wf_tool] = identifier[getToolByName] ( identifier[portal] , literal[string] )
identifier[logger] . identifier[info] ( literal[string] )
identifier[wf] = identifier[wf_tool] . identifier[getWorkflowById] ( literal[string] )
identifier[uc] = identifier[api] . identifier[get_tool] ( literal[string] )
identifier[r_rules] = identifier[uc] ( identifier[portal_type] = literal[string] )
keyword[for] identifier[rr] keyword[in] identifier[r_rules] :
identifier[obj] = identifier[rr] . identifier[getObject] ()
identifier[wf] . identifier[updateRoleMappingsFor] ( identifier[obj] )
identifier[obj] . identifier[reindexObject] ()
identifier[logger] . identifier[info] ( literal[string] ) | def update_reflexrules_workflow_state(portal):
"""
Updates Reflex Rules' inactive_state, otherwise they don't have it by
default.
:param portal: Portal object
:return: None
"""
wf_tool = getToolByName(portal, 'portal_workflow')
logger.info("Updating Reflex Rules' 'inactive_state's...")
wf = wf_tool.getWorkflowById('bika_inactive_workflow')
uc = api.get_tool('portal_catalog')
r_rules = uc(portal_type='ReflexRule')
for rr in r_rules:
obj = rr.getObject()
wf.updateRoleMappingsFor(obj)
obj.reindexObject() # depends on [control=['for'], data=['rr']]
logger.info("Reflex Rules' 'inactive_state's were updated.") |
def staticMovingAverage2(x, N=3, mode='reflect'):
    """
    Moving average filter for 1d numpy arrays.
    Pads the signal by ``N`` samples, then returns the mean of a forward
    and a backward running average of window size ``N`` (both computed via
    cumulative sums). The output has the same length as the input.
    Parameters
    ----------
    x : 1d np.ndarray
        Signal to smooth.
    N : int
        Window size; must be > 1.
    mode : str
        Boundary handling: 'reflect' (mirror about the edge value) or
        'nearest' (repeat the edge value).
        # Fixed docstring: the previously advertised 'constant' mode was
        # never implemented; the actual second mode is 'nearest'.
    Raises
    ------
    NotImplementedError
        If ``mode`` is not one of the supported modes.
    """
    assert N > 1
    x2 = np.empty(shape=x.shape[0] + N, dtype=x.dtype)
    start = N - 2
    if N == 2:
        start = 1
    end = N - start
    x2[start:-end] = x
    # boundaries
    if mode == 'reflect':
        # mirror the signal about its first/last value
        x2[:start] = x[0] + x[0] - x[start - 1::-1]
        x2[-end:] = x[-1] + x[-1] - x[-2:-end - 2:-1]
    elif mode == 'nearest':
        # repeat the edge values
        x2[:start] = x[0]
        x2[-end:] = x[-1]
    else:
        raise NotImplementedError("mode='%s' not supported" % mode)
    # Forward running average via cumulative sum...
    a1 = np.cumsum(x2)
    a1 = (a1[N:] - a1[:-N]) / N
    # ...and the same on the reversed signal, then average both passes.
    a2 = np.cumsum(x2[::-1])
    a2 = (a2[N:] - a2[:-N]) / N
    return 0.5 * (a1 + a2[::-1])
constant[
moving average filter for 1d arrays
supported modes for boundary handling: 'reflect' , 'constant'
]
assert[compare[name[N] greater[>] constant[1]]]
variable[x2] assign[=] call[name[np].empty, parameter[]]
variable[start] assign[=] binary_operation[name[N] - constant[2]]
if compare[name[N] equal[==] constant[2]] begin[:]
variable[start] assign[=] constant[1]
variable[end] assign[=] binary_operation[name[N] - name[start]]
call[name[x2]][<ast.Slice object at 0x7da207f98e80>] assign[=] name[x]
if compare[name[mode] equal[==] constant[reflect]] begin[:]
call[name[x2]][<ast.Slice object at 0x7da207f996f0>] assign[=] binary_operation[binary_operation[call[name[x]][constant[0]] + call[name[x]][constant[0]]] - call[name[x]][<ast.Slice object at 0x7da207f98100>]]
call[name[x2]][<ast.Slice object at 0x7da207f99810>] assign[=] binary_operation[binary_operation[call[name[x]][<ast.UnaryOp object at 0x7da207f9a440>] + call[name[x]][<ast.UnaryOp object at 0x7da207f9a560>]] - call[name[x]][<ast.Slice object at 0x7da207f990c0>]]
variable[a1] assign[=] call[name[np].cumsum, parameter[name[x2]]]
variable[a1] assign[=] binary_operation[binary_operation[call[name[a1]][<ast.Slice object at 0x7da207f992d0>] - call[name[a1]][<ast.Slice object at 0x7da207f981f0>]] / name[N]]
variable[a2] assign[=] call[name[np].cumsum, parameter[call[name[x2]][<ast.Slice object at 0x7da18f00e740>]]]
variable[a2] assign[=] binary_operation[binary_operation[call[name[a2]][<ast.Slice object at 0x7da18f00f0d0>] - call[name[a2]][<ast.Slice object at 0x7da18f00c850>]] / name[N]]
return[binary_operation[constant[0.5] * binary_operation[name[a1] + call[name[a2]][<ast.Slice object at 0x7da18f00c3a0>]]]]
variable[cumsum] assign[=] call[name[np].cumsum, parameter[name[x2]]]
return[binary_operation[binary_operation[call[name[cumsum]][<ast.Slice object at 0x7da18f00d8a0>] - call[name[cumsum]][<ast.Slice object at 0x7da18f00cf10>]] / name[N]]] | keyword[def] identifier[staticMovingAverage2] ( identifier[x] , identifier[N] = literal[int] , identifier[mode] = literal[string] ):
literal[string]
keyword[assert] identifier[N] > literal[int]
identifier[x2] = identifier[np] . identifier[empty] ( identifier[shape] = identifier[x] . identifier[shape] [ literal[int] ]+ identifier[N] , identifier[dtype] = identifier[x] . identifier[dtype] )
identifier[start] = identifier[N] - literal[int]
keyword[if] identifier[N] == literal[int] :
identifier[start] = literal[int]
identifier[end] = identifier[N] - identifier[start]
identifier[x2] [ identifier[start] :- identifier[end] ]= identifier[x]
keyword[if] identifier[mode] == literal[string] :
identifier[x2] [: identifier[start] ]= identifier[x] [ literal[int] ]+ identifier[x] [ literal[int] ]- identifier[x] [ identifier[start] - literal[int] ::- literal[int] ]
identifier[x2] [- identifier[end] :]= identifier[x] [- literal[int] ]+ identifier[x] [- literal[int] ]- identifier[x] [- literal[int] :- identifier[end] - literal[int] :- literal[int] ]
keyword[elif] identifier[mode] == literal[string] :
identifier[x2] [: identifier[start] ]= identifier[x] [ literal[int] ]
identifier[x2] [- identifier[end] :]= identifier[x] [- literal[int] ]
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] % identifier[mode] )
identifier[a1] = identifier[np] . identifier[cumsum] ( identifier[x2] )
identifier[a1] =( identifier[a1] [ identifier[N] :]- identifier[a1] [:- identifier[N] ])/ identifier[N]
identifier[a2] = identifier[np] . identifier[cumsum] ( identifier[x2] [::- literal[int] ])
identifier[a2] =( identifier[a2] [ identifier[N] :]- identifier[a2] [:- identifier[N] ])/ identifier[N]
keyword[return] literal[int] *( identifier[a1] + identifier[a2] [::- literal[int] ])
identifier[cumsum] = identifier[np] . identifier[cumsum] ( identifier[x2] )
keyword[return] ( identifier[cumsum] [ identifier[N] :]- identifier[cumsum] [:- identifier[N] ])/ identifier[N] | def staticMovingAverage2(x, N=3, mode='reflect'):
"""
moving average filter for 1d arrays
supported modes for boundary handling: 'reflect' , 'constant'
"""
assert N > 1
x2 = np.empty(shape=x.shape[0] + N, dtype=x.dtype)
start = N - 2
if N == 2:
start = 1 # depends on [control=['if'], data=[]]
end = N - start
x2[start:-end] = x
# boundaries
if mode == 'reflect':
x2[:start] = x[0] + x[0] - x[start - 1::-1]
x2[-end:] = x[-1] + x[-1] - x[-2:-end - 2:-1] # depends on [control=['if'], data=[]]
elif mode == 'nearest':
x2[:start] = x[0]
x2[-end:] = x[-1] # depends on [control=['if'], data=[]]
else:
raise NotImplementedError("mode='%s' not supported" % mode)
a1 = np.cumsum(x2)
a1 = (a1[N:] - a1[:-N]) / N
a2 = np.cumsum(x2[::-1])
a2 = (a2[N:] - a2[:-N]) / N
return 0.5 * (a1 + a2[::-1])
# TODO: unreachable code
cumsum = np.cumsum(x2)
return (cumsum[N:] - cumsum[:-N]) / N |
def matchlist_by_account(
self,
region,
encrypted_account_id,
queue=None,
begin_time=None,
end_time=None,
begin_index=None,
end_index=None,
season=None,
champion=None,
):
"""
Get matchlist for ranked games played on given account ID and platform ID
and filtered using given filter parameters, if any
A number of optional parameters are provided for filtering. It is up to the caller to
ensure that the combination of filter parameters provided is valid for the requested
account, otherwise, no matches may be returned.
Note that if either beginIndex or endIndex are specified, then both must be specified and
endIndex must be greater than beginIndex.
If endTime is specified, but not beginTime, then beginTime is effectively the start of the
account's match history.
If beginTime is specified, but not endTime, then endTime is effectively the current time.
Note that endTime should be greater than beginTime if both are specified, although there is
no maximum limit on their range.
:param string region: The region to execute this request on
:param string encrypted_account_id: The account ID.
:param Set[int] queue: Set of queue IDs for which to filtering matchlist.
:param long begin_time: The begin time to use for filtering matchlist specified as
epoch milliseconds.
:param long end_time: The end time to use for filtering matchlist specified as epoch
milliseconds.
:param int begin_index: The begin index to use for filtering matchlist.
:param int end_index: The end index to use for filtering matchlist.
:param Set[int] season: Set of season IDs for which to filtering matchlist.
:param Set[int] champion: Set of champion IDs for which to filtering matchlist.
:returns: MatchlistDto
"""
url, query = MatchApiV4Urls.matchlist_by_account(
region=region,
encrypted_account_id=encrypted_account_id,
queue=queue,
beginTime=begin_time,
endTime=end_time,
beginIndex=begin_index,
endIndex=end_index,
season=season,
champion=champion,
)
return self._raw_request(self.matchlist_by_account.__name__, region, url, query) | def function[matchlist_by_account, parameter[self, region, encrypted_account_id, queue, begin_time, end_time, begin_index, end_index, season, champion]]:
constant[
Get matchlist for ranked games played on given account ID and platform ID
and filtered using given filter parameters, if any
A number of optional parameters are provided for filtering. It is up to the caller to
ensure that the combination of filter parameters provided is valid for the requested
account, otherwise, no matches may be returned.
Note that if either beginIndex or endIndex are specified, then both must be specified and
endIndex must be greater than beginIndex.
If endTime is specified, but not beginTime, then beginTime is effectively the start of the
account's match history.
If beginTime is specified, but not endTime, then endTime is effectively the current time.
Note that endTime should be greater than beginTime if both are specified, although there is
no maximum limit on their range.
:param string region: The region to execute this request on
:param string encrypted_account_id: The account ID.
:param Set[int] queue: Set of queue IDs for which to filtering matchlist.
:param long begin_time: The begin time to use for filtering matchlist specified as
epoch milliseconds.
:param long end_time: The end time to use for filtering matchlist specified as epoch
milliseconds.
:param int begin_index: The begin index to use for filtering matchlist.
:param int end_index: The end index to use for filtering matchlist.
:param Set[int] season: Set of season IDs for which to filtering matchlist.
:param Set[int] champion: Set of champion IDs for which to filtering matchlist.
:returns: MatchlistDto
]
<ast.Tuple object at 0x7da1b1d4e260> assign[=] call[name[MatchApiV4Urls].matchlist_by_account, parameter[]]
return[call[name[self]._raw_request, parameter[name[self].matchlist_by_account.__name__, name[region], name[url], name[query]]]] | keyword[def] identifier[matchlist_by_account] (
identifier[self] ,
identifier[region] ,
identifier[encrypted_account_id] ,
identifier[queue] = keyword[None] ,
identifier[begin_time] = keyword[None] ,
identifier[end_time] = keyword[None] ,
identifier[begin_index] = keyword[None] ,
identifier[end_index] = keyword[None] ,
identifier[season] = keyword[None] ,
identifier[champion] = keyword[None] ,
):
literal[string]
identifier[url] , identifier[query] = identifier[MatchApiV4Urls] . identifier[matchlist_by_account] (
identifier[region] = identifier[region] ,
identifier[encrypted_account_id] = identifier[encrypted_account_id] ,
identifier[queue] = identifier[queue] ,
identifier[beginTime] = identifier[begin_time] ,
identifier[endTime] = identifier[end_time] ,
identifier[beginIndex] = identifier[begin_index] ,
identifier[endIndex] = identifier[end_index] ,
identifier[season] = identifier[season] ,
identifier[champion] = identifier[champion] ,
)
keyword[return] identifier[self] . identifier[_raw_request] ( identifier[self] . identifier[matchlist_by_account] . identifier[__name__] , identifier[region] , identifier[url] , identifier[query] ) | def matchlist_by_account(self, region, encrypted_account_id, queue=None, begin_time=None, end_time=None, begin_index=None, end_index=None, season=None, champion=None):
"""
Get matchlist for ranked games played on given account ID and platform ID
and filtered using given filter parameters, if any
A number of optional parameters are provided for filtering. It is up to the caller to
ensure that the combination of filter parameters provided is valid for the requested
account, otherwise, no matches may be returned.
Note that if either beginIndex or endIndex are specified, then both must be specified and
endIndex must be greater than beginIndex.
If endTime is specified, but not beginTime, then beginTime is effectively the start of the
account's match history.
If beginTime is specified, but not endTime, then endTime is effectively the current time.
Note that endTime should be greater than beginTime if both are specified, although there is
no maximum limit on their range.
:param string region: The region to execute this request on
:param string encrypted_account_id: The account ID.
:param Set[int] queue: Set of queue IDs for which to filtering matchlist.
:param long begin_time: The begin time to use for filtering matchlist specified as
epoch milliseconds.
:param long end_time: The end time to use for filtering matchlist specified as epoch
milliseconds.
:param int begin_index: The begin index to use for filtering matchlist.
:param int end_index: The end index to use for filtering matchlist.
:param Set[int] season: Set of season IDs for which to filtering matchlist.
:param Set[int] champion: Set of champion IDs for which to filtering matchlist.
:returns: MatchlistDto
"""
(url, query) = MatchApiV4Urls.matchlist_by_account(region=region, encrypted_account_id=encrypted_account_id, queue=queue, beginTime=begin_time, endTime=end_time, beginIndex=begin_index, endIndex=end_index, season=season, champion=champion)
return self._raw_request(self.matchlist_by_account.__name__, region, url, query) |
def get_url(self, instance=True):
'''get_url
High-level api: get_url returns a Restconf URL of the config node.
Parameters
----------
instance : `bool`
True if the Restconf URL refers to only one instance of a list or
leaf-list. False if the Restconf URL refers to all instances of a
list or leaf-list.
Returns
-------
str
A Restconf URL.
'''
def convert(default_ns, nodes):
ret = ''
for node in nodes:
default_ns, id = self.device.convert_tag(default_ns, node.tag,
dst=Tag.JSON_NAME)
ret += '/' + quote(id, safe='')
if self.is_config:
n = Composer(self.device, node)
if n.schema_node.get('type') == 'leaf-list':
if node != self.node or instance:
ret += '={}'.format(quote(node.text, safe=''))
elif n.schema_node.get('type') == 'list':
if node != self.node or instance:
values = []
for key in n.keys:
values.append(quote(node.find(key).text,
safe=''))
ret += '={}'.format(','.join(values))
return ret
nodes = list(reversed(list(self.node.iterancestors())))[1:] + \
[self.node]
return '/restconf/data' + convert('', nodes) | def function[get_url, parameter[self, instance]]:
constant[get_url
High-level api: get_url returns a Restconf URL of the config node.
Parameters
----------
instance : `bool`
True if the Restconf URL refers to only one instance of a list or
leaf-list. False if the Restconf URL refers to all instances of a
list or leaf-list.
Returns
-------
str
A Restconf URL.
]
def function[convert, parameter[default_ns, nodes]]:
variable[ret] assign[=] constant[]
for taget[name[node]] in starred[name[nodes]] begin[:]
<ast.Tuple object at 0x7da1b253be80> assign[=] call[name[self].device.convert_tag, parameter[name[default_ns], name[node].tag]]
<ast.AugAssign object at 0x7da1b253b040>
if name[self].is_config begin[:]
variable[n] assign[=] call[name[Composer], parameter[name[self].device, name[node]]]
if compare[call[name[n].schema_node.get, parameter[constant[type]]] equal[==] constant[leaf-list]] begin[:]
if <ast.BoolOp object at 0x7da1b2539900> begin[:]
<ast.AugAssign object at 0x7da1b2539990>
return[name[ret]]
variable[nodes] assign[=] binary_operation[call[call[name[list], parameter[call[name[reversed], parameter[call[name[list], parameter[call[name[self].node.iterancestors, parameter[]]]]]]]]][<ast.Slice object at 0x7da1b25383d0>] + list[[<ast.Attribute object at 0x7da1b2538340>]]]
return[binary_operation[constant[/restconf/data] + call[name[convert], parameter[constant[], name[nodes]]]]] | keyword[def] identifier[get_url] ( identifier[self] , identifier[instance] = keyword[True] ):
literal[string]
keyword[def] identifier[convert] ( identifier[default_ns] , identifier[nodes] ):
identifier[ret] = literal[string]
keyword[for] identifier[node] keyword[in] identifier[nodes] :
identifier[default_ns] , identifier[id] = identifier[self] . identifier[device] . identifier[convert_tag] ( identifier[default_ns] , identifier[node] . identifier[tag] ,
identifier[dst] = identifier[Tag] . identifier[JSON_NAME] )
identifier[ret] += literal[string] + identifier[quote] ( identifier[id] , identifier[safe] = literal[string] )
keyword[if] identifier[self] . identifier[is_config] :
identifier[n] = identifier[Composer] ( identifier[self] . identifier[device] , identifier[node] )
keyword[if] identifier[n] . identifier[schema_node] . identifier[get] ( literal[string] )== literal[string] :
keyword[if] identifier[node] != identifier[self] . identifier[node] keyword[or] identifier[instance] :
identifier[ret] += literal[string] . identifier[format] ( identifier[quote] ( identifier[node] . identifier[text] , identifier[safe] = literal[string] ))
keyword[elif] identifier[n] . identifier[schema_node] . identifier[get] ( literal[string] )== literal[string] :
keyword[if] identifier[node] != identifier[self] . identifier[node] keyword[or] identifier[instance] :
identifier[values] =[]
keyword[for] identifier[key] keyword[in] identifier[n] . identifier[keys] :
identifier[values] . identifier[append] ( identifier[quote] ( identifier[node] . identifier[find] ( identifier[key] ). identifier[text] ,
identifier[safe] = literal[string] ))
identifier[ret] += literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[values] ))
keyword[return] identifier[ret]
identifier[nodes] = identifier[list] ( identifier[reversed] ( identifier[list] ( identifier[self] . identifier[node] . identifier[iterancestors] ())))[ literal[int] :]+[ identifier[self] . identifier[node] ]
keyword[return] literal[string] + identifier[convert] ( literal[string] , identifier[nodes] ) | def get_url(self, instance=True):
"""get_url
High-level api: get_url returns a Restconf URL of the config node.
Parameters
----------
instance : `bool`
True if the Restconf URL refers to only one instance of a list or
leaf-list. False if the Restconf URL refers to all instances of a
list or leaf-list.
Returns
-------
str
A Restconf URL.
"""
def convert(default_ns, nodes):
ret = ''
for node in nodes:
(default_ns, id) = self.device.convert_tag(default_ns, node.tag, dst=Tag.JSON_NAME)
ret += '/' + quote(id, safe='')
if self.is_config:
n = Composer(self.device, node)
if n.schema_node.get('type') == 'leaf-list':
if node != self.node or instance:
ret += '={}'.format(quote(node.text, safe='')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif n.schema_node.get('type') == 'list':
if node != self.node or instance:
values = []
for key in n.keys:
values.append(quote(node.find(key).text, safe='')) # depends on [control=['for'], data=['key']]
ret += '={}'.format(','.join(values)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
return ret
nodes = list(reversed(list(self.node.iterancestors())))[1:] + [self.node]
return '/restconf/data' + convert('', nodes) |
def download_object(self, instance, bucket_name, object_name):
"""
Download an object.
:param str instance: A Yamcs instance name.
:param str bucket_name: The name of the bucket.
:param str object_name: The object to fetch.
"""
url = '/buckets/{}/{}/{}'.format(instance, bucket_name, object_name)
response = self._client.get_proto(path=url)
return response.content | def function[download_object, parameter[self, instance, bucket_name, object_name]]:
constant[
Download an object.
:param str instance: A Yamcs instance name.
:param str bucket_name: The name of the bucket.
:param str object_name: The object to fetch.
]
variable[url] assign[=] call[constant[/buckets/{}/{}/{}].format, parameter[name[instance], name[bucket_name], name[object_name]]]
variable[response] assign[=] call[name[self]._client.get_proto, parameter[]]
return[name[response].content] | keyword[def] identifier[download_object] ( identifier[self] , identifier[instance] , identifier[bucket_name] , identifier[object_name] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[instance] , identifier[bucket_name] , identifier[object_name] )
identifier[response] = identifier[self] . identifier[_client] . identifier[get_proto] ( identifier[path] = identifier[url] )
keyword[return] identifier[response] . identifier[content] | def download_object(self, instance, bucket_name, object_name):
"""
Download an object.
:param str instance: A Yamcs instance name.
:param str bucket_name: The name of the bucket.
:param str object_name: The object to fetch.
"""
url = '/buckets/{}/{}/{}'.format(instance, bucket_name, object_name)
response = self._client.get_proto(path=url)
return response.content |
def cli(obj, customer, match, delete):
"""Add group/org/domain/role-to-customer or delete lookup entry."""
client = obj['client']
if delete:
client.delete_customer(delete)
else:
if not customer:
raise click.UsageError('Missing option "--customer".')
if not match:
raise click.UsageError('Missing option "--org" / "--group" / "--domain" / "--role".')
try:
customer = client.create_customer(customer, match)
except Exception as e:
click.echo('ERROR: {}'.format(e))
sys.exit(1)
click.echo(customer.id) | def function[cli, parameter[obj, customer, match, delete]]:
constant[Add group/org/domain/role-to-customer or delete lookup entry.]
variable[client] assign[=] call[name[obj]][constant[client]]
if name[delete] begin[:]
call[name[client].delete_customer, parameter[name[delete]]] | keyword[def] identifier[cli] ( identifier[obj] , identifier[customer] , identifier[match] , identifier[delete] ):
literal[string]
identifier[client] = identifier[obj] [ literal[string] ]
keyword[if] identifier[delete] :
identifier[client] . identifier[delete_customer] ( identifier[delete] )
keyword[else] :
keyword[if] keyword[not] identifier[customer] :
keyword[raise] identifier[click] . identifier[UsageError] ( literal[string] )
keyword[if] keyword[not] identifier[match] :
keyword[raise] identifier[click] . identifier[UsageError] ( literal[string] )
keyword[try] :
identifier[customer] = identifier[client] . identifier[create_customer] ( identifier[customer] , identifier[match] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[e] ))
identifier[sys] . identifier[exit] ( literal[int] )
identifier[click] . identifier[echo] ( identifier[customer] . identifier[id] ) | def cli(obj, customer, match, delete):
"""Add group/org/domain/role-to-customer or delete lookup entry."""
client = obj['client']
if delete:
client.delete_customer(delete) # depends on [control=['if'], data=[]]
else:
if not customer:
raise click.UsageError('Missing option "--customer".') # depends on [control=['if'], data=[]]
if not match:
raise click.UsageError('Missing option "--org" / "--group" / "--domain" / "--role".') # depends on [control=['if'], data=[]]
try:
customer = client.create_customer(customer, match) # depends on [control=['try'], data=[]]
except Exception as e:
click.echo('ERROR: {}'.format(e))
sys.exit(1) # depends on [control=['except'], data=['e']]
click.echo(customer.id) |
def _show_info(app):
"""
显示系统信息
"""
print ("Server start on port {0} (processes: {1}) ...".format(app.port, app.processes))
print ("Start time: {0}".format(datetime.now().isoformat(" ")))
print
print ("Parameters:")
for k in sorted(dir(__conf__)):
if k.startswith("__"): continue
print (" {0:<20} : {1}".format(k, getattr(__conf__, k)))
print
print ("Handlers:")
handlers = sorted(app.handlers, key = lambda h: h[0])
pprint(handlers)
print | def function[_show_info, parameter[app]]:
constant[
显示系统信息
]
call[name[print], parameter[call[constant[Server start on port {0} (processes: {1}) ...].format, parameter[name[app].port, name[app].processes]]]]
call[name[print], parameter[call[constant[Start time: {0}].format, parameter[call[call[name[datetime].now, parameter[]].isoformat, parameter[constant[ ]]]]]]]
name[print]
call[name[print], parameter[constant[Parameters:]]]
for taget[name[k]] in starred[call[name[sorted], parameter[call[name[dir], parameter[name[__conf__]]]]]] begin[:]
if call[name[k].startswith, parameter[constant[__]]] begin[:]
continue
call[name[print], parameter[call[constant[ {0:<20} : {1}].format, parameter[name[k], call[name[getattr], parameter[name[__conf__], name[k]]]]]]]
name[print]
call[name[print], parameter[constant[Handlers:]]]
variable[handlers] assign[=] call[name[sorted], parameter[name[app].handlers]]
call[name[pprint], parameter[name[handlers]]]
name[print] | keyword[def] identifier[_show_info] ( identifier[app] ):
literal[string]
identifier[print] ( literal[string] . identifier[format] ( identifier[app] . identifier[port] , identifier[app] . identifier[processes] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[datetime] . identifier[now] (). identifier[isoformat] ( literal[string] )))
identifier[print]
identifier[print] ( literal[string] )
keyword[for] identifier[k] keyword[in] identifier[sorted] ( identifier[dir] ( identifier[__conf__] )):
keyword[if] identifier[k] . identifier[startswith] ( literal[string] ): keyword[continue]
identifier[print] ( literal[string] . identifier[format] ( identifier[k] , identifier[getattr] ( identifier[__conf__] , identifier[k] )))
identifier[print]
identifier[print] ( literal[string] )
identifier[handlers] = identifier[sorted] ( identifier[app] . identifier[handlers] , identifier[key] = keyword[lambda] identifier[h] : identifier[h] [ literal[int] ])
identifier[pprint] ( identifier[handlers] )
identifier[print] | def _show_info(app):
"""
显示系统信息
"""
print('Server start on port {0} (processes: {1}) ...'.format(app.port, app.processes))
print('Start time: {0}'.format(datetime.now().isoformat(' ')))
print
print('Parameters:')
for k in sorted(dir(__conf__)):
if k.startswith('__'):
continue # depends on [control=['if'], data=[]]
print(' {0:<20} : {1}'.format(k, getattr(__conf__, k))) # depends on [control=['for'], data=['k']]
print
print('Handlers:')
handlers = sorted(app.handlers, key=lambda h: h[0])
pprint(handlers)
print |
def competition_submit_cli(self,
file_name,
message,
competition,
competition_opt=None,
quiet=False):
""" submit a competition using the client. Arguments are same as for
competition_submit, except for extra arguments provided here.
Parameters
==========
competition_opt: an alternative competition option provided by cli
"""
competition = competition or competition_opt
try:
submit_result = self.competition_submit(file_name, message,
competition, quiet)
except ApiException as e:
if e.status == 404:
print('Could not find competition - please verify that you '
'entered the correct competition ID and that the '
'competition is still accepting submissions.')
return None
else:
raise e
return submit_result | def function[competition_submit_cli, parameter[self, file_name, message, competition, competition_opt, quiet]]:
constant[ submit a competition using the client. Arguments are same as for
competition_submit, except for extra arguments provided here.
Parameters
==========
competition_opt: an alternative competition option provided by cli
]
variable[competition] assign[=] <ast.BoolOp object at 0x7da1b21e3760>
<ast.Try object at 0x7da1b21e0ac0>
return[name[submit_result]] | keyword[def] identifier[competition_submit_cli] ( identifier[self] ,
identifier[file_name] ,
identifier[message] ,
identifier[competition] ,
identifier[competition_opt] = keyword[None] ,
identifier[quiet] = keyword[False] ):
literal[string]
identifier[competition] = identifier[competition] keyword[or] identifier[competition_opt]
keyword[try] :
identifier[submit_result] = identifier[self] . identifier[competition_submit] ( identifier[file_name] , identifier[message] ,
identifier[competition] , identifier[quiet] )
keyword[except] identifier[ApiException] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[status] == literal[int] :
identifier[print] ( literal[string]
literal[string]
literal[string] )
keyword[return] keyword[None]
keyword[else] :
keyword[raise] identifier[e]
keyword[return] identifier[submit_result] | def competition_submit_cli(self, file_name, message, competition, competition_opt=None, quiet=False):
""" submit a competition using the client. Arguments are same as for
competition_submit, except for extra arguments provided here.
Parameters
==========
competition_opt: an alternative competition option provided by cli
"""
competition = competition or competition_opt
try:
submit_result = self.competition_submit(file_name, message, competition, quiet) # depends on [control=['try'], data=[]]
except ApiException as e:
if e.status == 404:
print('Could not find competition - please verify that you entered the correct competition ID and that the competition is still accepting submissions.')
return None # depends on [control=['if'], data=[]]
else:
raise e # depends on [control=['except'], data=['e']]
return submit_result |
def parse_dash(string, width):
"parse dash pattern specified with string"
# DashConvert from {tk-sources}/generic/tkCanvUtil.c
w = max(1, int(width + 0.5))
n = len(string)
result = []
for i, c in enumerate(string):
if c == " " and len(result):
result[-1] += w + 1
elif c == "_":
result.append(8*w)
result.append(4*w)
elif c == "-":
result.append(6*w)
result.append(4*w)
elif c == ",":
result.append(4*w)
result.append(4*w)
elif c == ".":
result.append(2*w)
result.append(4*w)
return result | def function[parse_dash, parameter[string, width]]:
constant[parse dash pattern specified with string]
variable[w] assign[=] call[name[max], parameter[constant[1], call[name[int], parameter[binary_operation[name[width] + constant[0.5]]]]]]
variable[n] assign[=] call[name[len], parameter[name[string]]]
variable[result] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b0f47d30>, <ast.Name object at 0x7da1b0f463b0>]]] in starred[call[name[enumerate], parameter[name[string]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0f444c0> begin[:]
<ast.AugAssign object at 0x7da1b0f44d60>
return[name[result]] | keyword[def] identifier[parse_dash] ( identifier[string] , identifier[width] ):
literal[string]
identifier[w] = identifier[max] ( literal[int] , identifier[int] ( identifier[width] + literal[int] ))
identifier[n] = identifier[len] ( identifier[string] )
identifier[result] =[]
keyword[for] identifier[i] , identifier[c] keyword[in] identifier[enumerate] ( identifier[string] ):
keyword[if] identifier[c] == literal[string] keyword[and] identifier[len] ( identifier[result] ):
identifier[result] [- literal[int] ]+= identifier[w] + literal[int]
keyword[elif] identifier[c] == literal[string] :
identifier[result] . identifier[append] ( literal[int] * identifier[w] )
identifier[result] . identifier[append] ( literal[int] * identifier[w] )
keyword[elif] identifier[c] == literal[string] :
identifier[result] . identifier[append] ( literal[int] * identifier[w] )
identifier[result] . identifier[append] ( literal[int] * identifier[w] )
keyword[elif] identifier[c] == literal[string] :
identifier[result] . identifier[append] ( literal[int] * identifier[w] )
identifier[result] . identifier[append] ( literal[int] * identifier[w] )
keyword[elif] identifier[c] == literal[string] :
identifier[result] . identifier[append] ( literal[int] * identifier[w] )
identifier[result] . identifier[append] ( literal[int] * identifier[w] )
keyword[return] identifier[result] | def parse_dash(string, width):
"""parse dash pattern specified with string""" # DashConvert from {tk-sources}/generic/tkCanvUtil.c
w = max(1, int(width + 0.5))
n = len(string)
result = []
for (i, c) in enumerate(string):
if c == ' ' and len(result):
result[-1] += w + 1 # depends on [control=['if'], data=[]]
elif c == '_':
result.append(8 * w)
result.append(4 * w) # depends on [control=['if'], data=[]]
elif c == '-':
result.append(6 * w)
result.append(4 * w) # depends on [control=['if'], data=[]]
elif c == ',':
result.append(4 * w)
result.append(4 * w) # depends on [control=['if'], data=[]]
elif c == '.':
result.append(2 * w)
result.append(4 * w) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return result |
def matrix_asformat(lvl, name, format, blocksize=None):
"""Set a matrix to a specific format.
This routine looks for the matrix "name" in the specified format as a
member of the level instance, lvl. For example, if name='A', format='bsr'
and blocksize=(4,4), and if lvl.Absr44 exists with the correct blocksize,
then lvl.Absr is returned. If the matrix doesn't already exist, lvl.name
is converted to the desired format, and made a member of lvl.
Only create such persistent copies of a matrix for routines such as
presmoothing and postsmoothing, where the matrix conversion is done every
cycle.
Calling this function can _dramatically_ increase your memory costs.
Be careful with it's usage.
"""
desired_matrix = name + format
M = getattr(lvl, name)
if format == 'bsr':
desired_matrix += str(blocksize[0])+str(blocksize[1])
if hasattr(lvl, desired_matrix):
# if lvl already contains lvl.name+format
pass
elif M.format == format and format != 'bsr':
# is base_matrix already in the correct format?
setattr(lvl, desired_matrix, M)
elif M.format == format and format == 'bsr':
# convert to bsr with the right blocksize
# tobsr() will not do anything extra if this is uneeded
setattr(lvl, desired_matrix, M.tobsr(blocksize=blocksize))
else:
# convert
newM = getattr(M, 'to' + format)()
setattr(lvl, desired_matrix, newM)
return getattr(lvl, desired_matrix) | def function[matrix_asformat, parameter[lvl, name, format, blocksize]]:
constant[Set a matrix to a specific format.
This routine looks for the matrix "name" in the specified format as a
member of the level instance, lvl. For example, if name='A', format='bsr'
and blocksize=(4,4), and if lvl.Absr44 exists with the correct blocksize,
then lvl.Absr is returned. If the matrix doesn't already exist, lvl.name
is converted to the desired format, and made a member of lvl.
Only create such persistent copies of a matrix for routines such as
presmoothing and postsmoothing, where the matrix conversion is done every
cycle.
Calling this function can _dramatically_ increase your memory costs.
Be careful with it's usage.
]
variable[desired_matrix] assign[=] binary_operation[name[name] + name[format]]
variable[M] assign[=] call[name[getattr], parameter[name[lvl], name[name]]]
if compare[name[format] equal[==] constant[bsr]] begin[:]
<ast.AugAssign object at 0x7da1b0658af0>
if call[name[hasattr], parameter[name[lvl], name[desired_matrix]]] begin[:]
pass
return[call[name[getattr], parameter[name[lvl], name[desired_matrix]]]] | keyword[def] identifier[matrix_asformat] ( identifier[lvl] , identifier[name] , identifier[format] , identifier[blocksize] = keyword[None] ):
literal[string]
identifier[desired_matrix] = identifier[name] + identifier[format]
identifier[M] = identifier[getattr] ( identifier[lvl] , identifier[name] )
keyword[if] identifier[format] == literal[string] :
identifier[desired_matrix] += identifier[str] ( identifier[blocksize] [ literal[int] ])+ identifier[str] ( identifier[blocksize] [ literal[int] ])
keyword[if] identifier[hasattr] ( identifier[lvl] , identifier[desired_matrix] ):
keyword[pass]
keyword[elif] identifier[M] . identifier[format] == identifier[format] keyword[and] identifier[format] != literal[string] :
identifier[setattr] ( identifier[lvl] , identifier[desired_matrix] , identifier[M] )
keyword[elif] identifier[M] . identifier[format] == identifier[format] keyword[and] identifier[format] == literal[string] :
identifier[setattr] ( identifier[lvl] , identifier[desired_matrix] , identifier[M] . identifier[tobsr] ( identifier[blocksize] = identifier[blocksize] ))
keyword[else] :
identifier[newM] = identifier[getattr] ( identifier[M] , literal[string] + identifier[format] )()
identifier[setattr] ( identifier[lvl] , identifier[desired_matrix] , identifier[newM] )
keyword[return] identifier[getattr] ( identifier[lvl] , identifier[desired_matrix] ) | def matrix_asformat(lvl, name, format, blocksize=None):
"""Set a matrix to a specific format.
This routine looks for the matrix "name" in the specified format as a
member of the level instance, lvl. For example, if name='A', format='bsr'
and blocksize=(4,4), and if lvl.Absr44 exists with the correct blocksize,
then lvl.Absr is returned. If the matrix doesn't already exist, lvl.name
is converted to the desired format, and made a member of lvl.
Only create such persistent copies of a matrix for routines such as
presmoothing and postsmoothing, where the matrix conversion is done every
cycle.
Calling this function can _dramatically_ increase your memory costs.
Be careful with it's usage.
"""
desired_matrix = name + format
M = getattr(lvl, name)
if format == 'bsr':
desired_matrix += str(blocksize[0]) + str(blocksize[1]) # depends on [control=['if'], data=[]]
if hasattr(lvl, desired_matrix):
# if lvl already contains lvl.name+format
pass # depends on [control=['if'], data=[]]
elif M.format == format and format != 'bsr':
# is base_matrix already in the correct format?
setattr(lvl, desired_matrix, M) # depends on [control=['if'], data=[]]
elif M.format == format and format == 'bsr':
# convert to bsr with the right blocksize
# tobsr() will not do anything extra if this is uneeded
setattr(lvl, desired_matrix, M.tobsr(blocksize=blocksize)) # depends on [control=['if'], data=[]]
else:
# convert
newM = getattr(M, 'to' + format)()
setattr(lvl, desired_matrix, newM)
return getattr(lvl, desired_matrix) |
def from_json(raw):
"""Helper to construct a node from a dict.
Args:
raw (dict): Raw node representation.
Returns:
Node: A Node object or None.
"""
ncls = None
_type = raw.get('type')
try:
ncls = _type_map[NodeType(_type)]
except (KeyError, ValueError) as e:
logger.warning('Unknown node type: %s', _type)
if DEBUG:
raise_from(exception.ParseException('Parse error for %s' % (_type), raw), e)
return None
node = ncls()
node.load(raw)
return node | def function[from_json, parameter[raw]]:
constant[Helper to construct a node from a dict.
Args:
raw (dict): Raw node representation.
Returns:
Node: A Node object or None.
]
variable[ncls] assign[=] constant[None]
variable[_type] assign[=] call[name[raw].get, parameter[constant[type]]]
<ast.Try object at 0x7da1b2346470>
variable[node] assign[=] call[name[ncls], parameter[]]
call[name[node].load, parameter[name[raw]]]
return[name[node]] | keyword[def] identifier[from_json] ( identifier[raw] ):
literal[string]
identifier[ncls] = keyword[None]
identifier[_type] = identifier[raw] . identifier[get] ( literal[string] )
keyword[try] :
identifier[ncls] = identifier[_type_map] [ identifier[NodeType] ( identifier[_type] )]
keyword[except] ( identifier[KeyError] , identifier[ValueError] ) keyword[as] identifier[e] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[_type] )
keyword[if] identifier[DEBUG] :
identifier[raise_from] ( identifier[exception] . identifier[ParseException] ( literal[string] %( identifier[_type] ), identifier[raw] ), identifier[e] )
keyword[return] keyword[None]
identifier[node] = identifier[ncls] ()
identifier[node] . identifier[load] ( identifier[raw] )
keyword[return] identifier[node] | def from_json(raw):
"""Helper to construct a node from a dict.
Args:
raw (dict): Raw node representation.
Returns:
Node: A Node object or None.
"""
ncls = None
_type = raw.get('type')
try:
ncls = _type_map[NodeType(_type)] # depends on [control=['try'], data=[]]
except (KeyError, ValueError) as e:
logger.warning('Unknown node type: %s', _type)
if DEBUG:
raise_from(exception.ParseException('Parse error for %s' % _type, raw), e) # depends on [control=['if'], data=[]]
return None # depends on [control=['except'], data=['e']]
node = ncls()
node.load(raw)
return node |
def score(self):
"The total score for the words found, according to the rules."
return sum([self.scores[len(w)] for w in self.words()]) | def function[score, parameter[self]]:
constant[The total score for the words found, according to the rules.]
return[call[name[sum], parameter[<ast.ListComp object at 0x7da2045653f0>]]] | keyword[def] identifier[score] ( identifier[self] ):
literal[string]
keyword[return] identifier[sum] ([ identifier[self] . identifier[scores] [ identifier[len] ( identifier[w] )] keyword[for] identifier[w] keyword[in] identifier[self] . identifier[words] ()]) | def score(self):
"""The total score for the words found, according to the rules."""
return sum([self.scores[len(w)] for w in self.words()]) |
def _preprocess_request(self, save_data, return_data):
"""
Prepares requests for download and creates empty folders
:param save_data: Tells whether to save data or not
:type: bool
:param return_data: Tells whether to return data or not
:type: bool
"""
if not self.is_valid_request():
raise ValueError('Cannot obtain data because request is invalid')
if save_data and self.data_folder is None:
raise ValueError('Request parameter `data_folder` is not specified. '
'In order to save data please set `data_folder` to location on your disk.')
for download_request in self.download_list:
download_request.set_save_response(save_data)
download_request.set_return_data(return_data)
download_request.set_data_folder(self.data_folder)
if save_data:
for folder in self.folder_list:
make_folder(os.path.join(self.data_folder, folder)) | def function[_preprocess_request, parameter[self, save_data, return_data]]:
constant[
Prepares requests for download and creates empty folders
:param save_data: Tells whether to save data or not
:type: bool
:param return_data: Tells whether to return data or not
:type: bool
]
if <ast.UnaryOp object at 0x7da1b18b7190> begin[:]
<ast.Raise object at 0x7da1b18b6620>
if <ast.BoolOp object at 0x7da1b18b7df0> begin[:]
<ast.Raise object at 0x7da1b18b4700>
for taget[name[download_request]] in starred[name[self].download_list] begin[:]
call[name[download_request].set_save_response, parameter[name[save_data]]]
call[name[download_request].set_return_data, parameter[name[return_data]]]
call[name[download_request].set_data_folder, parameter[name[self].data_folder]]
if name[save_data] begin[:]
for taget[name[folder]] in starred[name[self].folder_list] begin[:]
call[name[make_folder], parameter[call[name[os].path.join, parameter[name[self].data_folder, name[folder]]]]] | keyword[def] identifier[_preprocess_request] ( identifier[self] , identifier[save_data] , identifier[return_data] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_valid_request] ():
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[save_data] keyword[and] identifier[self] . identifier[data_folder] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[for] identifier[download_request] keyword[in] identifier[self] . identifier[download_list] :
identifier[download_request] . identifier[set_save_response] ( identifier[save_data] )
identifier[download_request] . identifier[set_return_data] ( identifier[return_data] )
identifier[download_request] . identifier[set_data_folder] ( identifier[self] . identifier[data_folder] )
keyword[if] identifier[save_data] :
keyword[for] identifier[folder] keyword[in] identifier[self] . identifier[folder_list] :
identifier[make_folder] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[data_folder] , identifier[folder] )) | def _preprocess_request(self, save_data, return_data):
"""
Prepares requests for download and creates empty folders
:param save_data: Tells whether to save data or not
:type: bool
:param return_data: Tells whether to return data or not
:type: bool
"""
if not self.is_valid_request():
raise ValueError('Cannot obtain data because request is invalid') # depends on [control=['if'], data=[]]
if save_data and self.data_folder is None:
raise ValueError('Request parameter `data_folder` is not specified. In order to save data please set `data_folder` to location on your disk.') # depends on [control=['if'], data=[]]
for download_request in self.download_list:
download_request.set_save_response(save_data)
download_request.set_return_data(return_data)
download_request.set_data_folder(self.data_folder) # depends on [control=['for'], data=['download_request']]
if save_data:
for folder in self.folder_list:
make_folder(os.path.join(self.data_folder, folder)) # depends on [control=['for'], data=['folder']] # depends on [control=['if'], data=[]] |
def __setuptools_version(self):
"""Read setuptools version from the underlying ez_setup script."""
# Read the script directly as a file instead of importing it as a
# Python module and reading the value from the loaded module's global
# DEFAULT_VERSION variable. Not all ez_setup scripts are compatible
# with all Python environments and so importing them would require
# doing so using a separate process run in the target Python
# environment instead of the current one.
f = open(self.script_path(), "r")
try:
matcher = re.compile(r'\s*DEFAULT_VERSION\s*=\s*"([^"]*)"\s*$')
for i, line in enumerate(f):
if i > 50:
break
match = matcher.match(line)
if match:
return match.group(1)
finally:
f.close()
self.__error("error parsing setuptools installation script '%s'" % (
self.script_path(),)) | def function[__setuptools_version, parameter[self]]:
constant[Read setuptools version from the underlying ez_setup script.]
variable[f] assign[=] call[name[open], parameter[call[name[self].script_path, parameter[]], constant[r]]]
<ast.Try object at 0x7da18bc71c60>
call[name[self].__error, parameter[binary_operation[constant[error parsing setuptools installation script '%s'] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18bc71c00>]]]]] | keyword[def] identifier[__setuptools_version] ( identifier[self] ):
literal[string]
identifier[f] = identifier[open] ( identifier[self] . identifier[script_path] (), literal[string] )
keyword[try] :
identifier[matcher] = identifier[re] . identifier[compile] ( literal[string] )
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[f] ):
keyword[if] identifier[i] > literal[int] :
keyword[break]
identifier[match] = identifier[matcher] . identifier[match] ( identifier[line] )
keyword[if] identifier[match] :
keyword[return] identifier[match] . identifier[group] ( literal[int] )
keyword[finally] :
identifier[f] . identifier[close] ()
identifier[self] . identifier[__error] ( literal[string] %(
identifier[self] . identifier[script_path] (),)) | def __setuptools_version(self):
"""Read setuptools version from the underlying ez_setup script.""" # Read the script directly as a file instead of importing it as a
# Python module and reading the value from the loaded module's global
# DEFAULT_VERSION variable. Not all ez_setup scripts are compatible
# with all Python environments and so importing them would require
# doing so using a separate process run in the target Python
# environment instead of the current one.
f = open(self.script_path(), 'r')
try:
matcher = re.compile('\\s*DEFAULT_VERSION\\s*=\\s*"([^"]*)"\\s*$')
for (i, line) in enumerate(f):
if i > 50:
break # depends on [control=['if'], data=[]]
match = matcher.match(line)
if match:
return match.group(1) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
finally:
f.close()
self.__error("error parsing setuptools installation script '%s'" % (self.script_path(),)) |
def _initialize_messages_queues(self, chain_state: ChainState):
"""Initialize all the message queues with the transport.
Note:
All messages from the state queues must be pushed to the transport
before it's started. This is necessary to avoid a race where the
transport processes network messages too quickly, queueing new
messages before any of the previous messages, resulting in new
messages being out-of-order.
The Alarm task must be started before this method is called,
otherwise queues for channel closed while the node was offline
won't be properly cleared. It is not bad but it is suboptimal.
"""
assert not self.transport, f'Transport is running. node:{self!r}'
assert self.alarm.is_primed(), f'AlarmTask not primed. node:{self!r}'
events_queues = views.get_all_messagequeues(chain_state)
for queue_identifier, event_queue in events_queues.items():
self.start_health_check_for(queue_identifier.recipient)
for event in event_queue:
message = message_from_sendevent(event)
self.sign(message)
self.transport.send_async(queue_identifier, message) | def function[_initialize_messages_queues, parameter[self, chain_state]]:
constant[Initialize all the message queues with the transport.
Note:
All messages from the state queues must be pushed to the transport
before it's started. This is necessary to avoid a race where the
transport processes network messages too quickly, queueing new
messages before any of the previous messages, resulting in new
messages being out-of-order.
The Alarm task must be started before this method is called,
otherwise queues for channel closed while the node was offline
won't be properly cleared. It is not bad but it is suboptimal.
]
assert[<ast.UnaryOp object at 0x7da1b1708220>]
assert[call[name[self].alarm.is_primed, parameter[]]]
variable[events_queues] assign[=] call[name[views].get_all_messagequeues, parameter[name[chain_state]]]
for taget[tuple[[<ast.Name object at 0x7da1b170bdf0>, <ast.Name object at 0x7da1b170beb0>]]] in starred[call[name[events_queues].items, parameter[]]] begin[:]
call[name[self].start_health_check_for, parameter[name[queue_identifier].recipient]]
for taget[name[event]] in starred[name[event_queue]] begin[:]
variable[message] assign[=] call[name[message_from_sendevent], parameter[name[event]]]
call[name[self].sign, parameter[name[message]]]
call[name[self].transport.send_async, parameter[name[queue_identifier], name[message]]] | keyword[def] identifier[_initialize_messages_queues] ( identifier[self] , identifier[chain_state] : identifier[ChainState] ):
literal[string]
keyword[assert] keyword[not] identifier[self] . identifier[transport] , literal[string]
keyword[assert] identifier[self] . identifier[alarm] . identifier[is_primed] (), literal[string]
identifier[events_queues] = identifier[views] . identifier[get_all_messagequeues] ( identifier[chain_state] )
keyword[for] identifier[queue_identifier] , identifier[event_queue] keyword[in] identifier[events_queues] . identifier[items] ():
identifier[self] . identifier[start_health_check_for] ( identifier[queue_identifier] . identifier[recipient] )
keyword[for] identifier[event] keyword[in] identifier[event_queue] :
identifier[message] = identifier[message_from_sendevent] ( identifier[event] )
identifier[self] . identifier[sign] ( identifier[message] )
identifier[self] . identifier[transport] . identifier[send_async] ( identifier[queue_identifier] , identifier[message] ) | def _initialize_messages_queues(self, chain_state: ChainState):
"""Initialize all the message queues with the transport.
Note:
All messages from the state queues must be pushed to the transport
before it's started. This is necessary to avoid a race where the
transport processes network messages too quickly, queueing new
messages before any of the previous messages, resulting in new
messages being out-of-order.
The Alarm task must be started before this method is called,
otherwise queues for channel closed while the node was offline
won't be properly cleared. It is not bad but it is suboptimal.
"""
assert not self.transport, f'Transport is running. node:{self!r}'
assert self.alarm.is_primed(), f'AlarmTask not primed. node:{self!r}'
events_queues = views.get_all_messagequeues(chain_state)
for (queue_identifier, event_queue) in events_queues.items():
self.start_health_check_for(queue_identifier.recipient)
for event in event_queue:
message = message_from_sendevent(event)
self.sign(message)
self.transport.send_async(queue_identifier, message) # depends on [control=['for'], data=['event']] # depends on [control=['for'], data=[]] |
def set_spectator_mode(self, mode=True):
"""
When the flow is in spectator_mode, we have to disable signals, pickle dump and possible callbacks
A spectator can still operate on the flow but the new status of the flow won't be saved in
the pickle file. Usually the flow is in spectator mode when we are already running it via
the scheduler or other means and we should not interfere with its evolution.
This is the reason why signals and callbacks must be disabled.
Unfortunately preventing client-code from calling methods with side-effects when
the flow is in spectator mode is not easy (e.g. flow.cancel will cancel the tasks submitted to the
queue and the flow used by the scheduler won't see this change!
"""
# Set the flags of all the nodes in the flow.
mode = bool(mode)
self.in_spectator_mode = mode
for node in self.iflat_nodes():
node.in_spectator_mode = mode
# connect/disconnect signals depending on mode.
if not mode:
self.connect_signals()
else:
self.disconnect_signals() | def function[set_spectator_mode, parameter[self, mode]]:
constant[
When the flow is in spectator_mode, we have to disable signals, pickle dump and possible callbacks
A spectator can still operate on the flow but the new status of the flow won't be saved in
the pickle file. Usually the flow is in spectator mode when we are already running it via
the scheduler or other means and we should not interfere with its evolution.
This is the reason why signals and callbacks must be disabled.
Unfortunately preventing client-code from calling methods with side-effects when
the flow is in spectator mode is not easy (e.g. flow.cancel will cancel the tasks submitted to the
queue and the flow used by the scheduler won't see this change!
]
variable[mode] assign[=] call[name[bool], parameter[name[mode]]]
name[self].in_spectator_mode assign[=] name[mode]
for taget[name[node]] in starred[call[name[self].iflat_nodes, parameter[]]] begin[:]
name[node].in_spectator_mode assign[=] name[mode]
if <ast.UnaryOp object at 0x7da20c6e5000> begin[:]
call[name[self].connect_signals, parameter[]] | keyword[def] identifier[set_spectator_mode] ( identifier[self] , identifier[mode] = keyword[True] ):
literal[string]
identifier[mode] = identifier[bool] ( identifier[mode] )
identifier[self] . identifier[in_spectator_mode] = identifier[mode]
keyword[for] identifier[node] keyword[in] identifier[self] . identifier[iflat_nodes] ():
identifier[node] . identifier[in_spectator_mode] = identifier[mode]
keyword[if] keyword[not] identifier[mode] :
identifier[self] . identifier[connect_signals] ()
keyword[else] :
identifier[self] . identifier[disconnect_signals] () | def set_spectator_mode(self, mode=True):
"""
When the flow is in spectator_mode, we have to disable signals, pickle dump and possible callbacks
A spectator can still operate on the flow but the new status of the flow won't be saved in
the pickle file. Usually the flow is in spectator mode when we are already running it via
the scheduler or other means and we should not interfere with its evolution.
This is the reason why signals and callbacks must be disabled.
Unfortunately preventing client-code from calling methods with side-effects when
the flow is in spectator mode is not easy (e.g. flow.cancel will cancel the tasks submitted to the
queue and the flow used by the scheduler won't see this change!
"""
# Set the flags of all the nodes in the flow.
mode = bool(mode)
self.in_spectator_mode = mode
for node in self.iflat_nodes():
node.in_spectator_mode = mode # depends on [control=['for'], data=['node']]
# connect/disconnect signals depending on mode.
if not mode:
self.connect_signals() # depends on [control=['if'], data=[]]
else:
self.disconnect_signals() |
def put(self, url: StrOrURL,
*, data: Any=None, **kwargs: Any) -> '_RequestContextManager':
"""Perform HTTP PUT request."""
return _RequestContextManager(
self._request(hdrs.METH_PUT, url,
data=data,
**kwargs)) | def function[put, parameter[self, url]]:
constant[Perform HTTP PUT request.]
return[call[name[_RequestContextManager], parameter[call[name[self]._request, parameter[name[hdrs].METH_PUT, name[url]]]]]] | keyword[def] identifier[put] ( identifier[self] , identifier[url] : identifier[StrOrURL] ,
*, identifier[data] : identifier[Any] = keyword[None] ,** identifier[kwargs] : identifier[Any] )-> literal[string] :
literal[string]
keyword[return] identifier[_RequestContextManager] (
identifier[self] . identifier[_request] ( identifier[hdrs] . identifier[METH_PUT] , identifier[url] ,
identifier[data] = identifier[data] ,
** identifier[kwargs] )) | def put(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager':
"""Perform HTTP PUT request."""
return _RequestContextManager(self._request(hdrs.METH_PUT, url, data=data, **kwargs)) |
def get_distance(a, b, xaxis=True):
"""
Returns the distance between two blast HSPs.
"""
if xaxis:
arange = ("0", a.qstart, a.qstop, a.orientation) # 0 is the dummy chromosome
brange = ("0", b.qstart, b.qstop, b.orientation)
else:
arange = ("0", a.sstart, a.sstop, a.orientation)
brange = ("0", b.sstart, b.sstop, b.orientation)
dist, oo = range_distance(arange, brange, distmode="ee")
dist = abs(dist)
return dist | def function[get_distance, parameter[a, b, xaxis]]:
constant[
Returns the distance between two blast HSPs.
]
if name[xaxis] begin[:]
variable[arange] assign[=] tuple[[<ast.Constant object at 0x7da1b088ee00>, <ast.Attribute object at 0x7da1b088ed40>, <ast.Attribute object at 0x7da1b088edd0>, <ast.Attribute object at 0x7da1b088ee90>]]
variable[brange] assign[=] tuple[[<ast.Constant object at 0x7da1b088f100>, <ast.Attribute object at 0x7da1b088f040>, <ast.Attribute object at 0x7da1b088f0d0>, <ast.Attribute object at 0x7da1b088f310>]]
<ast.Tuple object at 0x7da1b088d150> assign[=] call[name[range_distance], parameter[name[arange], name[brange]]]
variable[dist] assign[=] call[name[abs], parameter[name[dist]]]
return[name[dist]] | keyword[def] identifier[get_distance] ( identifier[a] , identifier[b] , identifier[xaxis] = keyword[True] ):
literal[string]
keyword[if] identifier[xaxis] :
identifier[arange] =( literal[string] , identifier[a] . identifier[qstart] , identifier[a] . identifier[qstop] , identifier[a] . identifier[orientation] )
identifier[brange] =( literal[string] , identifier[b] . identifier[qstart] , identifier[b] . identifier[qstop] , identifier[b] . identifier[orientation] )
keyword[else] :
identifier[arange] =( literal[string] , identifier[a] . identifier[sstart] , identifier[a] . identifier[sstop] , identifier[a] . identifier[orientation] )
identifier[brange] =( literal[string] , identifier[b] . identifier[sstart] , identifier[b] . identifier[sstop] , identifier[b] . identifier[orientation] )
identifier[dist] , identifier[oo] = identifier[range_distance] ( identifier[arange] , identifier[brange] , identifier[distmode] = literal[string] )
identifier[dist] = identifier[abs] ( identifier[dist] )
keyword[return] identifier[dist] | def get_distance(a, b, xaxis=True):
"""
Returns the distance between two blast HSPs.
"""
if xaxis:
arange = ('0', a.qstart, a.qstop, a.orientation) # 0 is the dummy chromosome
brange = ('0', b.qstart, b.qstop, b.orientation) # depends on [control=['if'], data=[]]
else:
arange = ('0', a.sstart, a.sstop, a.orientation)
brange = ('0', b.sstart, b.sstop, b.orientation)
(dist, oo) = range_distance(arange, brange, distmode='ee')
dist = abs(dist)
return dist |
def get_obsolete_messages(self, domain):
"""
Returns obsolete valid messages after operation.
@type domain: str
@rtype: dict
"""
if domain not in self.domains:
raise ValueError('Invalid domain: {0}'.format(domain))
if domain not in self.messages or \
'obsolete' not in self.messages[domain]:
self._process_domain(domain)
return self.messages[domain]['obsolete'] | def function[get_obsolete_messages, parameter[self, domain]]:
constant[
Returns obsolete valid messages after operation.
@type domain: str
@rtype: dict
]
if compare[name[domain] <ast.NotIn object at 0x7da2590d7190> name[self].domains] begin[:]
<ast.Raise object at 0x7da1b20d5150>
if <ast.BoolOp object at 0x7da1b20d51b0> begin[:]
call[name[self]._process_domain, parameter[name[domain]]]
return[call[call[name[self].messages][name[domain]]][constant[obsolete]]] | keyword[def] identifier[get_obsolete_messages] ( identifier[self] , identifier[domain] ):
literal[string]
keyword[if] identifier[domain] keyword[not] keyword[in] identifier[self] . identifier[domains] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[domain] ))
keyword[if] identifier[domain] keyword[not] keyword[in] identifier[self] . identifier[messages] keyword[or] literal[string] keyword[not] keyword[in] identifier[self] . identifier[messages] [ identifier[domain] ]:
identifier[self] . identifier[_process_domain] ( identifier[domain] )
keyword[return] identifier[self] . identifier[messages] [ identifier[domain] ][ literal[string] ] | def get_obsolete_messages(self, domain):
"""
Returns obsolete valid messages after operation.
@type domain: str
@rtype: dict
"""
if domain not in self.domains:
raise ValueError('Invalid domain: {0}'.format(domain)) # depends on [control=['if'], data=['domain']]
if domain not in self.messages or 'obsolete' not in self.messages[domain]:
self._process_domain(domain) # depends on [control=['if'], data=[]]
return self.messages[domain]['obsolete'] |
def decode_path(file_path):
"""Turn a path name into unicode."""
if file_path is None:
return
if isinstance(file_path, six.binary_type):
file_path = file_path.decode(sys.getfilesystemencoding())
return file_path | def function[decode_path, parameter[file_path]]:
constant[Turn a path name into unicode.]
if compare[name[file_path] is constant[None]] begin[:]
return[None]
if call[name[isinstance], parameter[name[file_path], name[six].binary_type]] begin[:]
variable[file_path] assign[=] call[name[file_path].decode, parameter[call[name[sys].getfilesystemencoding, parameter[]]]]
return[name[file_path]] | keyword[def] identifier[decode_path] ( identifier[file_path] ):
literal[string]
keyword[if] identifier[file_path] keyword[is] keyword[None] :
keyword[return]
keyword[if] identifier[isinstance] ( identifier[file_path] , identifier[six] . identifier[binary_type] ):
identifier[file_path] = identifier[file_path] . identifier[decode] ( identifier[sys] . identifier[getfilesystemencoding] ())
keyword[return] identifier[file_path] | def decode_path(file_path):
"""Turn a path name into unicode."""
if file_path is None:
return # depends on [control=['if'], data=[]]
if isinstance(file_path, six.binary_type):
file_path = file_path.decode(sys.getfilesystemencoding()) # depends on [control=['if'], data=[]]
return file_path |
def join(self, queue_name, *, interval=100, timeout=None):
"""Wait for all the messages on the given queue to be
processed. This method is only meant to be used in tests to
wait for all the messages in a queue to be processed.
Raises:
QueueJoinTimeout: When the timeout elapses.
Parameters:
queue_name(str): The queue to wait on.
interval(Optional[int]): The interval, in milliseconds, at
which to check the queues.
timeout(Optional[int]): The max amount of time, in
milliseconds, to wait on this queue.
"""
deadline = timeout and time.monotonic() + timeout / 1000
while True:
if deadline and time.monotonic() >= deadline:
raise QueueJoinTimeout(queue_name)
size = 0
for name in (queue_name, dq_name(queue_name)):
size += self.do_qsize(name)
if size == 0:
return
time.sleep(interval / 1000) | def function[join, parameter[self, queue_name]]:
constant[Wait for all the messages on the given queue to be
processed. This method is only meant to be used in tests to
wait for all the messages in a queue to be processed.
Raises:
QueueJoinTimeout: When the timeout elapses.
Parameters:
queue_name(str): The queue to wait on.
interval(Optional[int]): The interval, in milliseconds, at
which to check the queues.
timeout(Optional[int]): The max amount of time, in
milliseconds, to wait on this queue.
]
variable[deadline] assign[=] <ast.BoolOp object at 0x7da1b1662530>
while constant[True] begin[:]
if <ast.BoolOp object at 0x7da1b16612a0> begin[:]
<ast.Raise object at 0x7da1b1662b30>
variable[size] assign[=] constant[0]
for taget[name[name]] in starred[tuple[[<ast.Name object at 0x7da1b16618d0>, <ast.Call object at 0x7da1b1660490>]]] begin[:]
<ast.AugAssign object at 0x7da1b1660430>
if compare[name[size] equal[==] constant[0]] begin[:]
return[None]
call[name[time].sleep, parameter[binary_operation[name[interval] / constant[1000]]]] | keyword[def] identifier[join] ( identifier[self] , identifier[queue_name] ,*, identifier[interval] = literal[int] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[deadline] = identifier[timeout] keyword[and] identifier[time] . identifier[monotonic] ()+ identifier[timeout] / literal[int]
keyword[while] keyword[True] :
keyword[if] identifier[deadline] keyword[and] identifier[time] . identifier[monotonic] ()>= identifier[deadline] :
keyword[raise] identifier[QueueJoinTimeout] ( identifier[queue_name] )
identifier[size] = literal[int]
keyword[for] identifier[name] keyword[in] ( identifier[queue_name] , identifier[dq_name] ( identifier[queue_name] )):
identifier[size] += identifier[self] . identifier[do_qsize] ( identifier[name] )
keyword[if] identifier[size] == literal[int] :
keyword[return]
identifier[time] . identifier[sleep] ( identifier[interval] / literal[int] ) | def join(self, queue_name, *, interval=100, timeout=None):
"""Wait for all the messages on the given queue to be
processed. This method is only meant to be used in tests to
wait for all the messages in a queue to be processed.
Raises:
QueueJoinTimeout: When the timeout elapses.
Parameters:
queue_name(str): The queue to wait on.
interval(Optional[int]): The interval, in milliseconds, at
which to check the queues.
timeout(Optional[int]): The max amount of time, in
milliseconds, to wait on this queue.
"""
deadline = timeout and time.monotonic() + timeout / 1000
while True:
if deadline and time.monotonic() >= deadline:
raise QueueJoinTimeout(queue_name) # depends on [control=['if'], data=[]]
size = 0
for name in (queue_name, dq_name(queue_name)):
size += self.do_qsize(name) # depends on [control=['for'], data=['name']]
if size == 0:
return # depends on [control=['if'], data=[]]
time.sleep(interval / 1000) # depends on [control=['while'], data=[]] |
def get_pull_command(self, remote=None, revision=None):
"""Get the command to pull changes from a remote repository into the local repository."""
command = ['hg', 'pull']
if remote:
command.append(remote)
if revision:
command.append('--rev=%s' % revision)
return command | def function[get_pull_command, parameter[self, remote, revision]]:
constant[Get the command to pull changes from a remote repository into the local repository.]
variable[command] assign[=] list[[<ast.Constant object at 0x7da1b0a21f60>, <ast.Constant object at 0x7da1b0a23d60>]]
if name[remote] begin[:]
call[name[command].append, parameter[name[remote]]]
if name[revision] begin[:]
call[name[command].append, parameter[binary_operation[constant[--rev=%s] <ast.Mod object at 0x7da2590d6920> name[revision]]]]
return[name[command]] | keyword[def] identifier[get_pull_command] ( identifier[self] , identifier[remote] = keyword[None] , identifier[revision] = keyword[None] ):
literal[string]
identifier[command] =[ literal[string] , literal[string] ]
keyword[if] identifier[remote] :
identifier[command] . identifier[append] ( identifier[remote] )
keyword[if] identifier[revision] :
identifier[command] . identifier[append] ( literal[string] % identifier[revision] )
keyword[return] identifier[command] | def get_pull_command(self, remote=None, revision=None):
"""Get the command to pull changes from a remote repository into the local repository."""
command = ['hg', 'pull']
if remote:
command.append(remote) # depends on [control=['if'], data=[]]
if revision:
command.append('--rev=%s' % revision) # depends on [control=['if'], data=[]]
return command |
async def send_location(self, chat_id: typing.Union[base.Integer, base.String],
latitude: base.Float, longitude: base.Float,
live_period: typing.Union[base.Integer, None] = None,
disable_notification: typing.Union[base.Boolean, None] = None,
reply_to_message_id: typing.Union[base.Integer, None] = None,
reply_markup: typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup,
types.ReplyKeyboardRemove,
types.ForceReply, None] = None) -> types.Message:
"""
Use this method to send point on the map.
Source: https://core.telegram.org/bots/api#sendlocation
:param chat_id: Unique identifier for the target chat or username of the target channel
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param latitude: Latitude of the location
:type latitude: :obj:`base.Float`
:param longitude: Longitude of the location
:type longitude: :obj:`base.Float`
:param live_period: Period in seconds for which the location will be updated
:type live_period: :obj:`typing.Union[base.Integer, None]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:param reply_to_message_id: If the message is a reply, ID of the original message
:type reply_to_message_id: :obj:`typing.Union[base.Integer, None]`
:param reply_markup: Additional interface options
:type reply_markup: :obj:`typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]`
:return: On success, the sent Message is returned
:rtype: :obj:`types.Message`
"""
reply_markup = prepare_arg(reply_markup)
payload = generate_payload(**locals())
result = await self.request(api.Methods.SEND_LOCATION, payload)
return types.Message(**result) | <ast.AsyncFunctionDef object at 0x7da1b17a8760> | keyword[async] keyword[def] identifier[send_location] ( identifier[self] , identifier[chat_id] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , identifier[base] . identifier[String] ],
identifier[latitude] : identifier[base] . identifier[Float] , identifier[longitude] : identifier[base] . identifier[Float] ,
identifier[live_period] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , keyword[None] ]= keyword[None] ,
identifier[disable_notification] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Boolean] , keyword[None] ]= keyword[None] ,
identifier[reply_to_message_id] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , keyword[None] ]= keyword[None] ,
identifier[reply_markup] : identifier[typing] . identifier[Union] [ identifier[types] . identifier[InlineKeyboardMarkup] ,
identifier[types] . identifier[ReplyKeyboardMarkup] ,
identifier[types] . identifier[ReplyKeyboardRemove] ,
identifier[types] . identifier[ForceReply] , keyword[None] ]= keyword[None] )-> identifier[types] . identifier[Message] :
literal[string]
identifier[reply_markup] = identifier[prepare_arg] ( identifier[reply_markup] )
identifier[payload] = identifier[generate_payload] (** identifier[locals] ())
identifier[result] = keyword[await] identifier[self] . identifier[request] ( identifier[api] . identifier[Methods] . identifier[SEND_LOCATION] , identifier[payload] )
keyword[return] identifier[types] . identifier[Message] (** identifier[result] ) | async def send_location(self, chat_id: typing.Union[base.Integer, base.String], latitude: base.Float, longitude: base.Float, live_period: typing.Union[base.Integer, None]=None, disable_notification: typing.Union[base.Boolean, None]=None, reply_to_message_id: typing.Union[base.Integer, None]=None, reply_markup: typing.Union[types.InlineKeyboardMarkup, types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]=None) -> types.Message:
"""
Use this method to send point on the map.
Source: https://core.telegram.org/bots/api#sendlocation
:param chat_id: Unique identifier for the target chat or username of the target channel
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param latitude: Latitude of the location
:type latitude: :obj:`base.Float`
:param longitude: Longitude of the location
:type longitude: :obj:`base.Float`
:param live_period: Period in seconds for which the location will be updated
:type live_period: :obj:`typing.Union[base.Integer, None]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:param reply_to_message_id: If the message is a reply, ID of the original message
:type reply_to_message_id: :obj:`typing.Union[base.Integer, None]`
:param reply_markup: Additional interface options
:type reply_markup: :obj:`typing.Union[types.InlineKeyboardMarkup,
types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None]`
:return: On success, the sent Message is returned
:rtype: :obj:`types.Message`
"""
reply_markup = prepare_arg(reply_markup)
payload = generate_payload(**locals())
result = await self.request(api.Methods.SEND_LOCATION, payload)
return types.Message(**result) |
def on_add(self, widget, data=None):
"""Create a global variable with default value and select its row
Triggered when the add button in the global variables tab is clicked.
"""
gv_name = "new_global_%s" % self.global_variable_counter
self.global_variable_counter += 1
try:
self.model.global_variable_manager.set_variable(gv_name, None)
except (RuntimeError, AttributeError, TypeError) as e:
logger.warning("Addition of new global variable '{0}' failed: {1}".format(gv_name, e))
self.select_entry(gv_name)
return True | def function[on_add, parameter[self, widget, data]]:
constant[Create a global variable with default value and select its row
Triggered when the add button in the global variables tab is clicked.
]
variable[gv_name] assign[=] binary_operation[constant[new_global_%s] <ast.Mod object at 0x7da2590d6920> name[self].global_variable_counter]
<ast.AugAssign object at 0x7da18bcc9240>
<ast.Try object at 0x7da18bcc90c0>
call[name[self].select_entry, parameter[name[gv_name]]]
return[constant[True]] | keyword[def] identifier[on_add] ( identifier[self] , identifier[widget] , identifier[data] = keyword[None] ):
literal[string]
identifier[gv_name] = literal[string] % identifier[self] . identifier[global_variable_counter]
identifier[self] . identifier[global_variable_counter] += literal[int]
keyword[try] :
identifier[self] . identifier[model] . identifier[global_variable_manager] . identifier[set_variable] ( identifier[gv_name] , keyword[None] )
keyword[except] ( identifier[RuntimeError] , identifier[AttributeError] , identifier[TypeError] ) keyword[as] identifier[e] :
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[gv_name] , identifier[e] ))
identifier[self] . identifier[select_entry] ( identifier[gv_name] )
keyword[return] keyword[True] | def on_add(self, widget, data=None):
"""Create a global variable with default value and select its row
Triggered when the add button in the global variables tab is clicked.
"""
gv_name = 'new_global_%s' % self.global_variable_counter
self.global_variable_counter += 1
try:
self.model.global_variable_manager.set_variable(gv_name, None) # depends on [control=['try'], data=[]]
except (RuntimeError, AttributeError, TypeError) as e:
logger.warning("Addition of new global variable '{0}' failed: {1}".format(gv_name, e)) # depends on [control=['except'], data=['e']]
self.select_entry(gv_name)
return True |
def factory(self, url):
'''
Return (expiration, obj) corresponding to provided url, exercising the
cache_policy as necessary.
'''
try:
return self.fetch(url)
except BaseException as exc:
logger.exception('Reppy cache fetch error on %s' % url)
return self.cache_policy.exception(url, exc) | def function[factory, parameter[self, url]]:
constant[
Return (expiration, obj) corresponding to provided url, exercising the
cache_policy as necessary.
]
<ast.Try object at 0x7da18fe939d0> | keyword[def] identifier[factory] ( identifier[self] , identifier[url] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[fetch] ( identifier[url] )
keyword[except] identifier[BaseException] keyword[as] identifier[exc] :
identifier[logger] . identifier[exception] ( literal[string] % identifier[url] )
keyword[return] identifier[self] . identifier[cache_policy] . identifier[exception] ( identifier[url] , identifier[exc] ) | def factory(self, url):
"""
Return (expiration, obj) corresponding to provided url, exercising the
cache_policy as necessary.
"""
try:
return self.fetch(url) # depends on [control=['try'], data=[]]
except BaseException as exc:
logger.exception('Reppy cache fetch error on %s' % url)
return self.cache_policy.exception(url, exc) # depends on [control=['except'], data=['exc']] |
def init_group_write(self, dst_addr=1, data=None, dptsize=0):
    """Prepare this CEMI frame as an (unnumbered) group write.

    :param dst_addr: destination group address
    :param data: payload; defaults to a single zero byte when omitted
    :param dptsize: datapoint size of the payload
    """
    self.init_group(dst_addr)
    # TPCI/APCI 0x0080: unnumbered data packet carrying a group write.
    self.tpci_apci = 0x80
    self.dptsize = dptsize
    self.data = [0] if data is None else data
constant[Initialize the CEMI frame for a group write operation.]
call[name[self].init_group, parameter[name[dst_addr]]]
name[self].tpci_apci assign[=] binary_operation[binary_operation[constant[0] * constant[256]] + constant[128]]
name[self].dptsize assign[=] name[dptsize]
if compare[name[data] is constant[None]] begin[:]
name[self].data assign[=] list[[<ast.Constant object at 0x7da20e9b2b30>]] | keyword[def] identifier[init_group_write] ( identifier[self] , identifier[dst_addr] = literal[int] , identifier[data] = keyword[None] , identifier[dptsize] = literal[int] ):
literal[string]
identifier[self] . identifier[init_group] ( identifier[dst_addr] )
identifier[self] . identifier[tpci_apci] = literal[int] * literal[int] + literal[int]
identifier[self] . identifier[dptsize] = identifier[dptsize]
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[self] . identifier[data] =[ literal[int] ]
keyword[else] :
identifier[self] . identifier[data] = identifier[data] | def init_group_write(self, dst_addr=1, data=None, dptsize=0):
"""Initialize the CEMI frame for a group write operation."""
self.init_group(dst_addr)
# unnumbered data packet, group write
self.tpci_apci = 0 * 256 + 128
self.dptsize = dptsize
if data is None:
self.data = [0] # depends on [control=['if'], data=[]]
else:
self.data = data |
def task_master(self):
    """A `TaskMaster` object for manipulating work, built lazily on first use."""
    cached = self._task_master
    if cached is None:
        # Construct once from the current configuration and memoize.
        cached = build_task_master(self.config)
        self._task_master = cached
    return cached
constant[A `TaskMaster` object for manipulating work]
if compare[name[self]._task_master is constant[None]] begin[:]
name[self]._task_master assign[=] call[name[build_task_master], parameter[name[self].config]]
return[name[self]._task_master] | keyword[def] identifier[task_master] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_task_master] keyword[is] keyword[None] :
identifier[self] . identifier[_task_master] = identifier[build_task_master] ( identifier[self] . identifier[config] )
keyword[return] identifier[self] . identifier[_task_master] | def task_master(self):
"""A `TaskMaster` object for manipulating work"""
if self._task_master is None:
self._task_master = build_task_master(self.config) # depends on [control=['if'], data=[]]
return self._task_master |
def read_key(suppress=False):
    """
    Block until the next keyboard event and return its name, falling back to
    the scan code when the event carries no name.
    """
    event = read_event(suppress)
    if event.name:
        return event.name
    return event.scan_code
constant[
Blocks until a keyboard event happens, then returns that event's name or,
if missing, its scan code.
]
variable[event] assign[=] call[name[read_event], parameter[name[suppress]]]
return[<ast.BoolOp object at 0x7da1b1bed540>] | keyword[def] identifier[read_key] ( identifier[suppress] = keyword[False] ):
literal[string]
identifier[event] = identifier[read_event] ( identifier[suppress] )
keyword[return] identifier[event] . identifier[name] keyword[or] identifier[event] . identifier[scan_code] | def read_key(suppress=False):
"""
Blocks until a keyboard event happens, then returns that event's name or,
if missing, its scan code.
"""
event = read_event(suppress)
return event.name or event.scan_code |
def path_to_url(path):
    """
    Convert a filesystem path to a file: URL. The path is absolutized first.
    """
    absolute = os.path.normcase(os.path.abspath(path))
    if _drive_re.match(absolute):
        # Encode a Windows drive letter ("C:") as "C|" per the file: scheme.
        absolute = absolute[0] + '|' + absolute[2:]
    # Quote first, then normalize separators and strip any leading slash.
    quoted = urllib.quote(absolute).replace(os.path.sep, '/').lstrip('/')
    return 'file:///' + quoted
constant[
Convert a path to a file: URL. The path will be made absolute.
]
variable[path] assign[=] call[name[os].path.normcase, parameter[call[name[os].path.abspath, parameter[name[path]]]]]
if call[name[_drive_re].match, parameter[name[path]]] begin[:]
variable[path] assign[=] binary_operation[binary_operation[call[name[path]][constant[0]] + constant[|]] + call[name[path]][<ast.Slice object at 0x7da2045675e0>]]
variable[url] assign[=] call[name[urllib].quote, parameter[name[path]]]
variable[url] assign[=] call[name[url].replace, parameter[name[os].path.sep, constant[/]]]
variable[url] assign[=] call[name[url].lstrip, parameter[constant[/]]]
return[binary_operation[constant[file:///] + name[url]]] | keyword[def] identifier[path_to_url] ( identifier[path] ):
literal[string]
identifier[path] = identifier[os] . identifier[path] . identifier[normcase] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] ))
keyword[if] identifier[_drive_re] . identifier[match] ( identifier[path] ):
identifier[path] = identifier[path] [ literal[int] ]+ literal[string] + identifier[path] [ literal[int] :]
identifier[url] = identifier[urllib] . identifier[quote] ( identifier[path] )
identifier[url] = identifier[url] . identifier[replace] ( identifier[os] . identifier[path] . identifier[sep] , literal[string] )
identifier[url] = identifier[url] . identifier[lstrip] ( literal[string] )
keyword[return] literal[string] + identifier[url] | def path_to_url(path):
"""
Convert a path to a file: URL. The path will be made absolute.
"""
path = os.path.normcase(os.path.abspath(path))
if _drive_re.match(path):
path = path[0] + '|' + path[2:] # depends on [control=['if'], data=[]]
url = urllib.quote(path)
url = url.replace(os.path.sep, '/')
url = url.lstrip('/')
return 'file:///' + url |
def append_to_circuit(self, circuit, simplify=True):
    """Apply this term's Pauli operators to `circuit`, simplifying first by default."""
    source = self.simplify() if simplify else self
    # Operators are applied right-to-left; identity factors add no gate.
    for pauli in source.ops[::-1]:
        label = pauli.op.lower()
        if label != "i":
            getattr(circuit, label)[pauli.n]
constant[Append Pauli gates to `Circuit`.]
if name[simplify] begin[:]
variable[term] assign[=] call[name[self].simplify, parameter[]]
for taget[name[op]] in starred[call[name[term].ops][<ast.Slice object at 0x7da18fe92740>]] begin[:]
variable[gate] assign[=] call[name[op].op.lower, parameter[]]
if compare[name[gate] not_equal[!=] constant[i]] begin[:]
call[call[name[getattr], parameter[name[circuit], name[gate]]]][name[op].n] | keyword[def] identifier[append_to_circuit] ( identifier[self] , identifier[circuit] , identifier[simplify] = keyword[True] ):
literal[string]
keyword[if] identifier[simplify] :
identifier[term] = identifier[self] . identifier[simplify] ()
keyword[else] :
identifier[term] = identifier[self]
keyword[for] identifier[op] keyword[in] identifier[term] . identifier[ops] [::- literal[int] ]:
identifier[gate] = identifier[op] . identifier[op] . identifier[lower] ()
keyword[if] identifier[gate] != literal[string] :
identifier[getattr] ( identifier[circuit] , identifier[gate] )[ identifier[op] . identifier[n] ] | def append_to_circuit(self, circuit, simplify=True):
"""Append Pauli gates to `Circuit`."""
if simplify:
term = self.simplify() # depends on [control=['if'], data=[]]
else:
term = self
for op in term.ops[::-1]:
gate = op.op.lower()
if gate != 'i':
getattr(circuit, gate)[op.n] # depends on [control=['if'], data=['gate']] # depends on [control=['for'], data=['op']] |
def _rows_int2date(self, rows):
"""
Replaces start and end dates in the row set with their integer representation
:param list[dict[str,T]] rows: The list of rows.
"""
for row in rows:
if self._date_type == 'str':
row[self._key_start_date] = datetime.date.fromordinal(row[self._key_start_date]).isoformat()
row[self._key_end_date] = datetime.date.fromordinal(row[self._key_end_date]).isoformat()
elif self._date_type == 'date':
row[self._key_start_date] = datetime.date.fromordinal(row[self._key_start_date])
row[self._key_end_date] = datetime.date.fromordinal(row[self._key_end_date])
elif self._date_type == 'int':
# Nothing to do.
pass
else:
raise ValueError('Unexpected date type {0!s}'.format(self._date_type)) | def function[_rows_int2date, parameter[self, rows]]:
constant[
Replaces start and end dates in the row set with their integer representation
:param list[dict[str,T]] rows: The list of rows.
]
for taget[name[row]] in starred[name[rows]] begin[:]
if compare[name[self]._date_type equal[==] constant[str]] begin[:]
call[name[row]][name[self]._key_start_date] assign[=] call[call[name[datetime].date.fromordinal, parameter[call[name[row]][name[self]._key_start_date]]].isoformat, parameter[]]
call[name[row]][name[self]._key_end_date] assign[=] call[call[name[datetime].date.fromordinal, parameter[call[name[row]][name[self]._key_end_date]]].isoformat, parameter[]] | keyword[def] identifier[_rows_int2date] ( identifier[self] , identifier[rows] ):
literal[string]
keyword[for] identifier[row] keyword[in] identifier[rows] :
keyword[if] identifier[self] . identifier[_date_type] == literal[string] :
identifier[row] [ identifier[self] . identifier[_key_start_date] ]= identifier[datetime] . identifier[date] . identifier[fromordinal] ( identifier[row] [ identifier[self] . identifier[_key_start_date] ]). identifier[isoformat] ()
identifier[row] [ identifier[self] . identifier[_key_end_date] ]= identifier[datetime] . identifier[date] . identifier[fromordinal] ( identifier[row] [ identifier[self] . identifier[_key_end_date] ]). identifier[isoformat] ()
keyword[elif] identifier[self] . identifier[_date_type] == literal[string] :
identifier[row] [ identifier[self] . identifier[_key_start_date] ]= identifier[datetime] . identifier[date] . identifier[fromordinal] ( identifier[row] [ identifier[self] . identifier[_key_start_date] ])
identifier[row] [ identifier[self] . identifier[_key_end_date] ]= identifier[datetime] . identifier[date] . identifier[fromordinal] ( identifier[row] [ identifier[self] . identifier[_key_end_date] ])
keyword[elif] identifier[self] . identifier[_date_type] == literal[string] :
keyword[pass]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[self] . identifier[_date_type] )) | def _rows_int2date(self, rows):
"""
Replaces start and end dates in the row set with their integer representation
:param list[dict[str,T]] rows: The list of rows.
"""
for row in rows:
if self._date_type == 'str':
row[self._key_start_date] = datetime.date.fromordinal(row[self._key_start_date]).isoformat()
row[self._key_end_date] = datetime.date.fromordinal(row[self._key_end_date]).isoformat() # depends on [control=['if'], data=[]]
elif self._date_type == 'date':
row[self._key_start_date] = datetime.date.fromordinal(row[self._key_start_date])
row[self._key_end_date] = datetime.date.fromordinal(row[self._key_end_date]) # depends on [control=['if'], data=[]]
elif self._date_type == 'int':
# Nothing to do.
pass # depends on [control=['if'], data=[]]
else:
raise ValueError('Unexpected date type {0!s}'.format(self._date_type)) # depends on [control=['for'], data=['row']] |
def shelld(ndim, array):
    """
    Sort a double precision array in place using the Shell Sort algorithm.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/shelld_c.html

    :param ndim: Dimension of the array.
    :type ndim: int
    :param array: The array to be sorted.
    :type array: Array of floats
    :return: The sorted array.
    :rtype: Array of floats
    """
    # CSPICE mutates its buffer, so marshal to a ctypes double vector,
    # sort it in place, then copy the result back to a Python sequence.
    c_count = ctypes.c_int(ndim)
    c_values = stypes.toDoubleVector(array)
    libspice.shelld_c(c_count, ctypes.cast(c_values, ctypes.POINTER(ctypes.c_double)))
    return stypes.cVectorToPython(c_values)
constant[
Sort a double precision array using the Shell Sort algorithm.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/shelld_c.html
:param ndim: Dimension of the array.
:type ndim: int
:param array: The array to be sorted.
:type array: Array of floats
:return: The sorted array.
:rtype: Array of floats
]
variable[array] assign[=] call[name[stypes].toDoubleVector, parameter[name[array]]]
variable[ndim] assign[=] call[name[ctypes].c_int, parameter[name[ndim]]]
call[name[libspice].shelld_c, parameter[name[ndim], call[name[ctypes].cast, parameter[name[array], call[name[ctypes].POINTER, parameter[name[ctypes].c_double]]]]]]
return[call[name[stypes].cVectorToPython, parameter[name[array]]]] | keyword[def] identifier[shelld] ( identifier[ndim] , identifier[array] ):
literal[string]
identifier[array] = identifier[stypes] . identifier[toDoubleVector] ( identifier[array] )
identifier[ndim] = identifier[ctypes] . identifier[c_int] ( identifier[ndim] )
identifier[libspice] . identifier[shelld_c] ( identifier[ndim] , identifier[ctypes] . identifier[cast] ( identifier[array] , identifier[ctypes] . identifier[POINTER] ( identifier[ctypes] . identifier[c_double] )))
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[array] ) | def shelld(ndim, array):
# Works!, use this as example for "I/O" parameters
'\n Sort a double precision array using the Shell Sort algorithm.\n\n http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/shelld_c.html\n\n :param ndim: Dimension of the array.\n :type ndim: int\n :param array: The array to be sorted.\n :type array: Array of floats\n :return: The sorted array.\n :rtype: Array of floats\n '
array = stypes.toDoubleVector(array)
ndim = ctypes.c_int(ndim)
libspice.shelld_c(ndim, ctypes.cast(array, ctypes.POINTER(ctypes.c_double)))
return stypes.cVectorToPython(array) |
def ExtractEvents(self, parser_mediator, registry_key, codepage='cp1252', **kwargs):
    """Extracts events from a Windows Registry key.

    All of the work is delegated to the MRUListEx parsing helper.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
      codepage (Optional[str]): extended ASCII string codepage.
    """
    self._ParseMRUListExKey(parser_mediator, registry_key, codepage=codepage)
constant[Extracts events from a Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
codepage (Optional[str]): extended ASCII string codepage.
]
call[name[self]._ParseMRUListExKey, parameter[name[parser_mediator], name[registry_key]]] | keyword[def] identifier[ExtractEvents] (
identifier[self] , identifier[parser_mediator] , identifier[registry_key] , identifier[codepage] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_ParseMRUListExKey] ( identifier[parser_mediator] , identifier[registry_key] , identifier[codepage] = identifier[codepage] ) | def ExtractEvents(self, parser_mediator, registry_key, codepage='cp1252', **kwargs):
"""Extracts events from a Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
codepage (Optional[str]): extended ASCII string codepage.
"""
self._ParseMRUListExKey(parser_mediator, registry_key, codepage=codepage) |
def experiments_predictions_delete(self, experiment_id, run_id, erase=False):
    """Delete the model run (prediction) attached to an experiment.

    Raises ValueError if an attempt is made to delete a read-only resource.

    Parameters
    ----------
    experiment_id : string
        Unique experiment identifier
    run_id : string
        Unique model run identifier
    erase : Boolean, optional
        When True the record is removed from the database entirely; used when
        the sco backend failed to start a run whose record already exists.

    Returns
    -------
    ModelRunHandle
        Handle for deleted model run or None if unknown
    """
    # Look the run up first so an unknown id returns None instead of raising.
    model_run = self.experiments_predictions_get(experiment_id, run_id)
    if model_run is not None:
        # delete_object may still raise for an invalid database state (e.g.
        # the prediction vanished between lookup and delete).
        return self.predictions.delete_object(model_run.identifier, erase=erase)
    return None
constant[Delete given prediction for experiment.
Raises ValueError if an attempt is made to delete a read-only resource.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
erase : Boolean, optional
If true, the model run will be deleted from the database. Used in
case the sco backend could not start a model run after the record
had already been created in the database.
Returns
-------
ModelRunHandle
Handle for deleted model run or None if unknown
]
variable[model_run] assign[=] call[name[self].experiments_predictions_get, parameter[name[experiment_id], name[run_id]]]
if compare[name[model_run] is constant[None]] begin[:]
return[constant[None]]
return[call[name[self].predictions.delete_object, parameter[name[model_run].identifier]]] | keyword[def] identifier[experiments_predictions_delete] ( identifier[self] , identifier[experiment_id] , identifier[run_id] , identifier[erase] = keyword[False] ):
literal[string]
identifier[model_run] = identifier[self] . identifier[experiments_predictions_get] ( identifier[experiment_id] , identifier[run_id] )
keyword[if] identifier[model_run] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[predictions] . identifier[delete_object] ( identifier[model_run] . identifier[identifier] , identifier[erase] = identifier[erase] ) | def experiments_predictions_delete(self, experiment_id, run_id, erase=False):
"""Delete given prediction for experiment.
Raises ValueError if an attempt is made to delete a read-only resource.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
erase : Boolean, optional
If true, the model run will be deleted from the database. Used in
case the sco backend could not start a model run after the record
had already been created in the database.
Returns
-------
ModelRunHandle
Handle for deleted model run or None if unknown
"""
# Get model run to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None # depends on [control=['if'], data=[]]
# Return resutl of deleting model run. Could also raise exception in
# case of invalid database state (i.e., prediction does not exist)
return self.predictions.delete_object(model_run.identifier, erase=erase) |
def from_dict(self, order_dict):
    '''Populate this QA_Order's attributes from a plain dict.

    Every attribute is keyed in `order_dict` under its own name, so the 28
    hand-written assignments are replaced by a single loop over the field
    list (same assignment order as before, so a failure still leaves the
    same partial state).

    :param order_dict: dict keyed by attribute name
    :return: self on success, None (implicitly) when a field is missing or
             unreadable — the error is logged, matching the old behaviour
    '''
    fields = (
        'price', 'date', 'datetime', 'sending_time', 'trade_time', 'amount',
        'frequence', 'market_type', 'towards', 'code', 'user',
        'account_cookie', 'strategy', 'type', 'order_model', 'amount_model',
        'order_id', 'realorder_id', 'trade_id', 'callback',
        'commission_coeff', 'tax_coeff', 'money', '_status',
        'cancel_amount', 'trade_amount', 'trade_price', 'reason',
    )
    try:
        for key in fields:
            setattr(self, key, order_dict[key])
        return self
    except Exception as e:
        QA_util_log_info('Failed to tran from dict {}'.format(e))
QA_util_log_info('Failed to tran from dict {}'.format(e)) | def function[from_dict, parameter[self, order_dict]]:
constant[
从字段类型的字段 填充 对象的字段
:param order_dict: dict 类型
:return: self QA_Order
]
<ast.Try object at 0x7da1b20630d0> | keyword[def] identifier[from_dict] ( identifier[self] , identifier[order_dict] ):
literal[string]
keyword[try] :
identifier[self] . identifier[price] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[date] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[datetime] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[sending_time] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[trade_time] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[amount] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[frequence] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[market_type] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[towards] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[code] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[user] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[account_cookie] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[strategy] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[type] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[order_model] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[amount_model] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[order_id] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[realorder_id] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[trade_id] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[callback] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[commission_coeff] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[tax_coeff] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[money] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[_status] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[cancel_amount] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[trade_amount] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[trade_price] = identifier[order_dict] [ literal[string] ]
identifier[self] . identifier[reason] = identifier[order_dict] [ literal[string] ]
keyword[return] identifier[self]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[QA_util_log_info] ( literal[string] . identifier[format] ( identifier[e] )) | def from_dict(self, order_dict):
"""
从字段类型的字段 填充 对象的字段
:param order_dict: dict 类型
:return: self QA_Order
"""
try:
# QA_util_log_info('QA_ORDER CHANGE: from {} change to {}'.format(
# self.order_id, order['order_id']))
self.price = order_dict['price']
self.date = order_dict['date']
self.datetime = order_dict['datetime']
self.sending_time = order_dict['sending_time'] # 下单时间
self.trade_time = order_dict['trade_time']
self.amount = order_dict['amount']
self.frequence = order_dict['frequence']
self.market_type = order_dict['market_type']
self.towards = order_dict['towards']
self.code = order_dict['code']
self.user = order_dict['user']
self.account_cookie = order_dict['account_cookie']
self.strategy = order_dict['strategy']
self.type = order_dict['type']
self.order_model = order_dict['order_model']
self.amount_model = order_dict['amount_model']
self.order_id = order_dict['order_id']
self.realorder_id = order_dict['realorder_id']
self.trade_id = order_dict['trade_id']
self.callback = order_dict['callback']
self.commission_coeff = order_dict['commission_coeff']
self.tax_coeff = order_dict['tax_coeff']
self.money = order_dict['money']
self._status = order_dict['_status']
self.cancel_amount = order_dict['cancel_amount']
self.trade_amount = order_dict['trade_amount']
self.trade_price = order_dict['trade_price']
self.reason = order_dict['reason']
return self # depends on [control=['try'], data=[]]
except Exception as e:
QA_util_log_info('Failed to tran from dict {}'.format(e)) # depends on [control=['except'], data=['e']] |
def AddFileDescriptor(self, file_desc):
    """Adds a FileDescriptor to the pool, non-recursively.

    If the FileDescriptor contains messages or enums, the caller must
    explicitly register them.

    Args:
      file_desc: A FileDescriptor.
    """
    self._AddFileDescriptor(file_desc)
    # Temporary workaround for FieldDescriptor.file: index each top-level
    # extension back to its file until code generation provides the link.
    for ext in file_desc.extensions_by_name.values():
        self._file_desc_by_toplevel_extension[ext.full_name] = file_desc
constant[Adds a FileDescriptor to the pool, non-recursively.
If the FileDescriptor contains messages or enums, the caller must explicitly
register them.
Args:
file_desc: A FileDescriptor.
]
call[name[self]._AddFileDescriptor, parameter[name[file_desc]]]
for taget[name[extension]] in starred[call[name[file_desc].extensions_by_name.values, parameter[]]] begin[:]
call[name[self]._file_desc_by_toplevel_extension][name[extension].full_name] assign[=] name[file_desc] | keyword[def] identifier[AddFileDescriptor] ( identifier[self] , identifier[file_desc] ):
literal[string]
identifier[self] . identifier[_AddFileDescriptor] ( identifier[file_desc] )
keyword[for] identifier[extension] keyword[in] identifier[file_desc] . identifier[extensions_by_name] . identifier[values] ():
identifier[self] . identifier[_file_desc_by_toplevel_extension] [
identifier[extension] . identifier[full_name] ]= identifier[file_desc] | def AddFileDescriptor(self, file_desc):
"""Adds a FileDescriptor to the pool, non-recursively.
If the FileDescriptor contains messages or enums, the caller must explicitly
register them.
Args:
file_desc: A FileDescriptor.
"""
self._AddFileDescriptor(file_desc)
# TODO(jieluo): This is a temporary solution for FieldDescriptor.file.
# Remove it when FieldDescriptor.file is added in code gen.
for extension in file_desc.extensions_by_name.values():
self._file_desc_by_toplevel_extension[extension.full_name] = file_desc # depends on [control=['for'], data=['extension']] |
def set_nlp_base(self, value):
    '''Setter for the NLP helper.

    :param value: An `NlpBase` instance.
    :raise TypeError: If `value` is not an `NlpBase`.
    '''
    # `isinstance(...) is False` was an anti-pattern; `not isinstance`
    # expresses the same guard idiomatically.
    if not isinstance(value, NlpBase):
        raise TypeError("The type of value must be NlpBase.")
    self.__nlp_base = value
constant[ setter ]
if compare[call[name[isinstance], parameter[name[value], name[NlpBase]]] is constant[False]] begin[:]
<ast.Raise object at 0x7da20e9b2830>
name[self].__nlp_base assign[=] name[value] | keyword[def] identifier[set_nlp_base] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[NlpBase] ) keyword[is] keyword[False] :
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[self] . identifier[__nlp_base] = identifier[value] | def set_nlp_base(self, value):
""" setter """
if isinstance(value, NlpBase) is False:
raise TypeError('The type of value must be NlpBase.') # depends on [control=['if'], data=[]]
self.__nlp_base = value |
def _update_properties(self, name, value):
"""Update properties, and keep cache up-to-date if auto decode is
enabled.
:param str name: Key
:param obj value: Value
:return:
"""
if self._auto_decode and 'properties' in self._decode_cache:
self._decode_cache['properties'][name] = value
self._properties[name] = value | def function[_update_properties, parameter[self, name, value]]:
constant[Update properties, and keep cache up-to-date if auto decode is
enabled.
:param str name: Key
:param obj value: Value
:return:
]
if <ast.BoolOp object at 0x7da20c6aa5f0> begin[:]
call[call[name[self]._decode_cache][constant[properties]]][name[name]] assign[=] name[value]
call[name[self]._properties][name[name]] assign[=] name[value] | keyword[def] identifier[_update_properties] ( identifier[self] , identifier[name] , identifier[value] ):
literal[string]
keyword[if] identifier[self] . identifier[_auto_decode] keyword[and] literal[string] keyword[in] identifier[self] . identifier[_decode_cache] :
identifier[self] . identifier[_decode_cache] [ literal[string] ][ identifier[name] ]= identifier[value]
identifier[self] . identifier[_properties] [ identifier[name] ]= identifier[value] | def _update_properties(self, name, value):
"""Update properties, and keep cache up-to-date if auto decode is
enabled.
:param str name: Key
:param obj value: Value
:return:
"""
if self._auto_decode and 'properties' in self._decode_cache:
self._decode_cache['properties'][name] = value # depends on [control=['if'], data=[]]
self._properties[name] = value |
def _get_existing_conf(config):
"""
Read existing local.conf and strip out service id and client secret
:param config: Location of config files
:param lines of existing config (excluding service id and client secret)
"""
try:
with open(os.path.join(config, 'local.conf'), 'r') as f:
lines = [line for line in f.readlines()
if not (line.startswith('service_id') or line.startswith('client_secret'))]
except IOError:
lines = []
return lines | def function[_get_existing_conf, parameter[config]]:
constant[
Read existing local.conf and strip out service id and client secret
:param config: Location of config files
:param lines of existing config (excluding service id and client secret)
]
<ast.Try object at 0x7da1b09bbd30>
return[name[lines]] | keyword[def] identifier[_get_existing_conf] ( identifier[config] ):
literal[string]
keyword[try] :
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[config] , literal[string] ), literal[string] ) keyword[as] identifier[f] :
identifier[lines] =[ identifier[line] keyword[for] identifier[line] keyword[in] identifier[f] . identifier[readlines] ()
keyword[if] keyword[not] ( identifier[line] . identifier[startswith] ( literal[string] ) keyword[or] identifier[line] . identifier[startswith] ( literal[string] ))]
keyword[except] identifier[IOError] :
identifier[lines] =[]
keyword[return] identifier[lines] | def _get_existing_conf(config):
"""
Read existing local.conf and strip out service id and client secret
:param config: Location of config files
:param lines of existing config (excluding service id and client secret)
"""
try:
with open(os.path.join(config, 'local.conf'), 'r') as f:
lines = [line for line in f.readlines() if not (line.startswith('service_id') or line.startswith('client_secret'))] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except IOError:
lines = [] # depends on [control=['except'], data=[]]
return lines |
def ticket_comment_make_private(self, ticket_id, id, data, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/ticket_comments#make-comment-private"
    # Build the endpoint and issue the PUT through the shared call helper.
    endpoint = "/api/v2/tickets/{ticket_id}/comments/{id}/make_private.json".format(
        ticket_id=ticket_id, id=id)
    return self.call(endpoint, method="PUT", data=data, **kwargs)
constant[https://developer.zendesk.com/rest_api/docs/core/ticket_comments#make-comment-private]
variable[api_path] assign[=] constant[/api/v2/tickets/{ticket_id}/comments/{id}/make_private.json]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[ticket_comment_make_private] ( identifier[self] , identifier[ticket_id] , identifier[id] , identifier[data] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[ticket_id] = identifier[ticket_id] , identifier[id] = identifier[id] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[method] = literal[string] , identifier[data] = identifier[data] ,** identifier[kwargs] ) | def ticket_comment_make_private(self, ticket_id, id, data, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/core/ticket_comments#make-comment-private"""
api_path = '/api/v2/tickets/{ticket_id}/comments/{id}/make_private.json'
api_path = api_path.format(ticket_id=ticket_id, id=id)
return self.call(api_path, method='PUT', data=data, **kwargs) |
def append(self, item):
    """Add ``item`` to the right side of the GeventDeque.

    This method never blocks: either the deque grows (bounded only by
    available memory), or, if the deque was created with a ``maxlen``
    and is already full, the leftmost item is silently discarded to
    make room.
    """
    self._deque.append(item)
    # Signal that the deque is now non-empty — presumably this wakes
    # greenlets blocked waiting for an item (e.g. a blocking pop);
    # TODO confirm against the consumers of ``notEmpty``.
    self.notEmpty.set()
constant[Add item to the right side of the GeventDeque.
This method does not block. Either the GeventDeque grows to
consume available memory, or if this GeventDeque has and is at
maxlen, the leftmost item is removed.
]
call[name[self]._deque.append, parameter[name[item]]]
call[name[self].notEmpty.set, parameter[]] | keyword[def] identifier[append] ( identifier[self] , identifier[item] ):
literal[string]
identifier[self] . identifier[_deque] . identifier[append] ( identifier[item] )
identifier[self] . identifier[notEmpty] . identifier[set] () | def append(self, item):
"""Add item to the right side of the GeventDeque.
This method does not block. Either the GeventDeque grows to
consume available memory, or if this GeventDeque has and is at
maxlen, the leftmost item is removed.
"""
self._deque.append(item)
self.notEmpty.set() |
def pair_bars(first: Histogram1D, second: Histogram1D, *, orientation: str = "vertical", kind: str = "bar", **kwargs):
    """Draw two histograms mirrored around zero in one figure.

    ``first`` is drawn downwards (its values are negated) and ``second``
    upwards, so the two distributions can be compared back-to-back.

    Parameters
    ----------
    first: Histogram1D
        Histogram drawn below the axis (negated).
    second: Histogram1D
        Histogram drawn above the axis.
    color1: str
        Color for ``first`` (default "red").
    color2: str
        Color for ``second`` (default "blue").
    orientation: str
        Currently unused; only the vertical layout is implemented.

    Returns
    -------
    plt.Axes
    """
    # TODO: enable vertical as well as horizontal
    _, ax = _get_axes(kwargs)
    color1 = kwargs.pop("color1", "red")
    color2 = kwargs.pop("color2", "blue")
    title = kwargs.pop("title", "{0} - {1}".format(first.name, second.name))
    # Bug fix: the lower bound previously compared first's left edge with
    # itself; the x range must span the left edges of *both* histograms.
    xlim = kwargs.pop("xlim", (min(first.bin_left_edges[0], second.bin_left_edges[0]),
                               max(first.bin_right_edges[-1], second.bin_right_edges[-1])))
    bar(first * (-1), color=color1, ax=ax, ylim="keep", **kwargs)
    bar(second, color=color2, ax=ax, ylim="keep", **kwargs)
    ax.set_title(title)
    # Relabel the y axis with absolute values so the downward (negated)
    # bars read as positive counts.
    ticks = np.abs(ax.get_yticks())
    if np.allclose(np.rint(ticks), ticks):
        ax.set_yticklabels(ticks.astype(int))
    else:
        ax.set_yticklabels(ticks)
    ax.set_xlim(xlim)
    ax.legend()
    return ax
constant[Draw two different histograms mirrored in one figure.
Parameters
----------
first: Histogram1D
second: Histogram1D
color1:
color2:
orientation: str
Returns
-------
plt.Axes
]
<ast.Tuple object at 0x7da18f7237f0> assign[=] call[name[_get_axes], parameter[name[kwargs]]]
variable[color1] assign[=] call[name[kwargs].pop, parameter[constant[color1], constant[red]]]
variable[color2] assign[=] call[name[kwargs].pop, parameter[constant[color2], constant[blue]]]
variable[title] assign[=] call[name[kwargs].pop, parameter[constant[title], call[constant[{0} - {1}].format, parameter[name[first].name, name[second].name]]]]
variable[xlim] assign[=] call[name[kwargs].pop, parameter[constant[xlim], tuple[[<ast.Call object at 0x7da18f721c90>, <ast.Call object at 0x7da18f720460>]]]]
call[name[bar], parameter[binary_operation[name[first] * <ast.UnaryOp object at 0x7da18f722770>]]]
call[name[bar], parameter[name[second]]]
call[name[ax].set_title, parameter[name[title]]]
variable[ticks] assign[=] call[name[np].abs, parameter[call[name[ax].get_yticks, parameter[]]]]
if call[name[np].allclose, parameter[call[name[np].rint, parameter[name[ticks]]], name[ticks]]] begin[:]
call[name[ax].set_yticklabels, parameter[call[name[ticks].astype, parameter[name[int]]]]]
call[name[ax].set_xlim, parameter[name[xlim]]]
call[name[ax].legend, parameter[]]
return[name[ax]] | keyword[def] identifier[pair_bars] ( identifier[first] : identifier[Histogram1D] , identifier[second] : identifier[Histogram2D] ,*, identifier[orientation] : identifier[str] = literal[string] , identifier[kind] : identifier[str] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[_] , identifier[ax] = identifier[_get_axes] ( identifier[kwargs] )
identifier[color1] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
identifier[color2] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
identifier[title] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] . identifier[format] ( identifier[first] . identifier[name] , identifier[second] . identifier[name] ))
identifier[xlim] = identifier[kwargs] . identifier[pop] ( literal[string] ,( identifier[min] ( identifier[first] . identifier[bin_left_edges] [ literal[int] ], identifier[first] . identifier[bin_left_edges] [
literal[int] ]), identifier[max] ( identifier[first] . identifier[bin_right_edges] [- literal[int] ], identifier[second] . identifier[bin_right_edges] [- literal[int] ])))
identifier[bar] ( identifier[first] *(- literal[int] ), identifier[color] = identifier[color1] , identifier[ax] = identifier[ax] , identifier[ylim] = literal[string] ,** identifier[kwargs] )
identifier[bar] ( identifier[second] , identifier[color] = identifier[color2] , identifier[ax] = identifier[ax] , identifier[ylim] = literal[string] ,** identifier[kwargs] )
identifier[ax] . identifier[set_title] ( identifier[title] )
identifier[ticks] = identifier[np] . identifier[abs] ( identifier[ax] . identifier[get_yticks] ())
keyword[if] identifier[np] . identifier[allclose] ( identifier[np] . identifier[rint] ( identifier[ticks] ), identifier[ticks] ):
identifier[ax] . identifier[set_yticklabels] ( identifier[ticks] . identifier[astype] ( identifier[int] ))
keyword[else] :
identifier[ax] . identifier[set_yticklabels] ( identifier[ticks] )
identifier[ax] . identifier[set_xlim] ( identifier[xlim] )
identifier[ax] . identifier[legend] ()
keyword[return] identifier[ax] | def pair_bars(first: Histogram1D, second: Histogram2D, *, orientation: str='vertical', kind: str='bar', **kwargs):
"""Draw two different histograms mirrored in one figure.
Parameters
----------
first: Histogram1D
second: Histogram1D
color1:
color2:
orientation: str
Returns
-------
plt.Axes
"""
# TODO: enable vertical as well as horizontal
(_, ax) = _get_axes(kwargs)
color1 = kwargs.pop('color1', 'red')
color2 = kwargs.pop('color2', 'blue')
title = kwargs.pop('title', '{0} - {1}'.format(first.name, second.name))
xlim = kwargs.pop('xlim', (min(first.bin_left_edges[0], first.bin_left_edges[0]), max(first.bin_right_edges[-1], second.bin_right_edges[-1])))
bar(first * -1, color=color1, ax=ax, ylim='keep', **kwargs)
bar(second, color=color2, ax=ax, ylim='keep', **kwargs)
ax.set_title(title)
ticks = np.abs(ax.get_yticks())
if np.allclose(np.rint(ticks), ticks):
ax.set_yticklabels(ticks.astype(int)) # depends on [control=['if'], data=[]]
else:
ax.set_yticklabels(ticks)
ax.set_xlim(xlim)
ax.legend()
return ax |
def confd_state_ha_master_node_id(self, **kwargs):
    """Build a confd-state/ha/master-node-id config tree and hand it to
    the callback (``kwargs['callback']`` or ``self._callback``).
    """
    root = ET.Element("config")
    state = ET.SubElement(root, "confd-state",
                          xmlns="http://tail-f.com/yang/confd-monitoring")
    node = ET.SubElement(ET.SubElement(state, "ha"), "master-node-id")
    node.text = kwargs.pop('master_node_id')
    handler = kwargs.pop('callback', self._callback)
    return handler(root)
return callback(config) | def function[confd_state_ha_master_node_id, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[confd_state] assign[=] call[name[ET].SubElement, parameter[name[config], constant[confd-state]]]
variable[ha] assign[=] call[name[ET].SubElement, parameter[name[confd_state], constant[ha]]]
variable[master_node_id] assign[=] call[name[ET].SubElement, parameter[name[ha], constant[master-node-id]]]
name[master_node_id].text assign[=] call[name[kwargs].pop, parameter[constant[master_node_id]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[confd_state_ha_master_node_id] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[confd_state] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[ha] = identifier[ET] . identifier[SubElement] ( identifier[confd_state] , literal[string] )
identifier[master_node_id] = identifier[ET] . identifier[SubElement] ( identifier[ha] , literal[string] )
identifier[master_node_id] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def confd_state_ha_master_node_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
confd_state = ET.SubElement(config, 'confd-state', xmlns='http://tail-f.com/yang/confd-monitoring')
ha = ET.SubElement(confd_state, 'ha')
master_node_id = ET.SubElement(ha, 'master-node-id')
master_node_id.text = kwargs.pop('master_node_id')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def central_widget(self):
    """Default widget occupying the entire canvas area, created lazily
    on first access and cached thereafter.
    """
    widget = self._central_widget
    if widget is None:
        widget = Widget(size=self.size, parent=self.scene)
        self._central_widget = widget
    return widget
constant[ Returns the default widget that occupies the entire area of the
canvas.
]
if compare[name[self]._central_widget is constant[None]] begin[:]
name[self]._central_widget assign[=] call[name[Widget], parameter[]]
return[name[self]._central_widget] | keyword[def] identifier[central_widget] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_central_widget] keyword[is] keyword[None] :
identifier[self] . identifier[_central_widget] = identifier[Widget] ( identifier[size] = identifier[self] . identifier[size] , identifier[parent] = identifier[self] . identifier[scene] )
keyword[return] identifier[self] . identifier[_central_widget] | def central_widget(self):
""" Returns the default widget that occupies the entire area of the
canvas.
"""
if self._central_widget is None:
self._central_widget = Widget(size=self.size, parent=self.scene) # depends on [control=['if'], data=[]]
return self._central_widget |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.