code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def add_relationship_methods(self):
    """Attach relationship accessors to every applicable model class.

    Each model in ``self.applicable_models`` gains three callable
    accessors (``get_related``, ``get_related_live``,
    ``get_related_with_scores``) plus three cached properties
    (``related``, ``related_live``, ``related_with_scores``) that
    delegate to the wagtailrelations ``Entry`` for the instance.
    """
    Entry = apps.get_model('wagtailrelations', 'Entry')

    @cached_property
    def related(instance):
        return instance.get_related()

    @cached_property
    def related_live(instance):
        return instance.get_related_live()

    @cached_property
    def related_with_scores(instance):
        return instance.get_related_with_scores()

    def get_related(instance):
        entry = Entry.objects.get_for_model(instance)[0]
        return entry.get_related()

    def get_related_live(instance):
        entry = Entry.objects.get_for_model(instance)[0]
        return entry.get_related_live()

    def get_related_with_scores(instance):
        # An Entry may not be resolvable for the instance yet; treat
        # that as "no related items" rather than propagating the error.
        try:
            entry = Entry.objects.get_for_model(instance)[0]
            return entry.get_related_with_scores()
        except IntegrityError:
            return []

    # Table of attribute name -> implementation, installed in this order.
    accessors = (
        ('get_related', get_related),
        ('get_related_live', get_related_live),
        ('get_related_with_scores', get_related_with_scores),
        ('related', related),
        ('related_live', related_live),
        ('related_with_scores', related_with_scores),
    )
    for model in self.applicable_models:
        for attr_name, impl in accessors:
            model.add_to_class(attr_name, impl)
def function[add_relationship_methods, parameter[self]]: constant[ Adds relationship methods to applicable model classes. ] variable[Entry] assign[=] call[name[apps].get_model, parameter[constant[wagtailrelations], constant[Entry]]] def function[related, parameter[instance]]: return[call[name[instance].get_related, parameter[]]] def function[related_live, parameter[instance]]: return[call[name[instance].get_related_live, parameter[]]] def function[related_with_scores, parameter[instance]]: return[call[name[instance].get_related_with_scores, parameter[]]] def function[get_related, parameter[instance]]: variable[entry] assign[=] call[call[name[Entry].objects.get_for_model, parameter[name[instance]]]][constant[0]] return[call[name[entry].get_related, parameter[]]] def function[get_related_live, parameter[instance]]: variable[entry] assign[=] call[call[name[Entry].objects.get_for_model, parameter[name[instance]]]][constant[0]] return[call[name[entry].get_related_live, parameter[]]] def function[get_related_with_scores, parameter[instance]]: <ast.Try object at 0x7da1b0f9c4c0> for taget[name[model]] in starred[name[self].applicable_models] begin[:] call[name[model].add_to_class, parameter[constant[get_related], name[get_related]]] call[name[model].add_to_class, parameter[constant[get_related_live], name[get_related_live]]] call[name[model].add_to_class, parameter[constant[get_related_with_scores], name[get_related_with_scores]]] call[name[model].add_to_class, parameter[constant[related], name[related]]] call[name[model].add_to_class, parameter[constant[related_live], name[related_live]]] call[name[model].add_to_class, parameter[constant[related_with_scores], name[related_with_scores]]]
keyword[def] identifier[add_relationship_methods] ( identifier[self] ): literal[string] identifier[Entry] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] ) @ identifier[cached_property] keyword[def] identifier[related] ( identifier[instance] ): keyword[return] identifier[instance] . identifier[get_related] () @ identifier[cached_property] keyword[def] identifier[related_live] ( identifier[instance] ): keyword[return] identifier[instance] . identifier[get_related_live] () @ identifier[cached_property] keyword[def] identifier[related_with_scores] ( identifier[instance] ): keyword[return] identifier[instance] . identifier[get_related_with_scores] () keyword[def] identifier[get_related] ( identifier[instance] ): identifier[entry] = identifier[Entry] . identifier[objects] . identifier[get_for_model] ( identifier[instance] )[ literal[int] ] keyword[return] identifier[entry] . identifier[get_related] () keyword[def] identifier[get_related_live] ( identifier[instance] ): identifier[entry] = identifier[Entry] . identifier[objects] . identifier[get_for_model] ( identifier[instance] )[ literal[int] ] keyword[return] identifier[entry] . identifier[get_related_live] () keyword[def] identifier[get_related_with_scores] ( identifier[instance] ): keyword[try] : identifier[entry] = identifier[Entry] . identifier[objects] . identifier[get_for_model] ( identifier[instance] )[ literal[int] ] keyword[return] identifier[entry] . identifier[get_related_with_scores] () keyword[except] identifier[IntegrityError] : keyword[return] [] keyword[for] identifier[model] keyword[in] identifier[self] . identifier[applicable_models] : identifier[model] . identifier[add_to_class] ( literal[string] , identifier[get_related] ) identifier[model] . identifier[add_to_class] ( literal[string] , identifier[get_related_live] ) identifier[model] . identifier[add_to_class] ( literal[string] , identifier[get_related_with_scores] ) identifier[model] . 
identifier[add_to_class] ( literal[string] , identifier[related] ) identifier[model] . identifier[add_to_class] ( literal[string] , identifier[related_live] ) identifier[model] . identifier[add_to_class] ( literal[string] , identifier[related_with_scores] )
def add_relationship_methods(self): """ Adds relationship methods to applicable model classes. """ Entry = apps.get_model('wagtailrelations', 'Entry') @cached_property def related(instance): return instance.get_related() @cached_property def related_live(instance): return instance.get_related_live() @cached_property def related_with_scores(instance): return instance.get_related_with_scores() def get_related(instance): entry = Entry.objects.get_for_model(instance)[0] return entry.get_related() def get_related_live(instance): entry = Entry.objects.get_for_model(instance)[0] return entry.get_related_live() def get_related_with_scores(instance): try: entry = Entry.objects.get_for_model(instance)[0] return entry.get_related_with_scores() # depends on [control=['try'], data=[]] except IntegrityError: return [] # depends on [control=['except'], data=[]] for model in self.applicable_models: model.add_to_class('get_related', get_related) model.add_to_class('get_related_live', get_related_live) model.add_to_class('get_related_with_scores', get_related_with_scores) model.add_to_class('related', related) model.add_to_class('related_live', related_live) model.add_to_class('related_with_scores', related_with_scores) # depends on [control=['for'], data=['model']]
def report(self, event, metadata=None, block=None):
    """
    Reports an event to Alooma by formatting it properly and placing
    it in the buffer to be sent by the Sender instance

    :param event:    A dict / string representing an event
    :param metadata: (Optional) A dict with extra metadata to be
                     attached to the event
    :param block:    (Optional) If True, the function will block the
                     thread until the event buffer has space for the
                     event. If False, reported events are discarded if
                     the queue is full. Defaults to None, which uses
                     the global `block` parameter given in the `init`.
    :return: True if the event was successfully enqueued, else False
    """
    # A terminated sender can never deliver the event - refuse early.
    if self._sender.is_terminated:
        self._notify(logging.ERROR,
                     consts.LOG_MSG_REPORT_AFTER_TERMINATION)
        return False

    # Only dicts and strings are valid event payloads.
    if not isinstance(event, (dict,) + py2to3.basestring):
        error_message = (consts.LOG_MSG_BAD_EVENT % (type(event), event))
        self._notify(logging.ERROR, error_message)
        return False

    formatted_event = self._format_event(event, metadata)
    # Fall back to the instance-wide blocking policy when the caller
    # did not specify one.
    if block is None:
        should_block = self.is_blocking
    else:
        should_block = block
    return self._sender.enqueue_event(formatted_event, should_block)
def function[report, parameter[self, event, metadata, block]]: constant[ Reports an event to Alooma by formatting it properly and placing it in the buffer to be sent by the Sender instance :param event: A dict / string representing an event :param metadata: (Optional) A dict with extra metadata to be attached to the event :param block: (Optional) If True, the function will block the thread until the event buffer has space for the event. If False, reported events are discarded if the queue is full. Defaults to None, which uses the global `block` parameter given in the `init`. :return: True if the event was successfully enqueued, else False ] if name[self]._sender.is_terminated begin[:] call[name[self]._notify, parameter[name[logging].ERROR, name[consts].LOG_MSG_REPORT_AFTER_TERMINATION]] return[constant[False]] if call[name[isinstance], parameter[name[event], binary_operation[tuple[[<ast.Name object at 0x7da20c6a8fd0>]] + name[py2to3].basestring]]] begin[:] variable[formatted_event] assign[=] call[name[self]._format_event, parameter[name[event], name[metadata]]] variable[should_block] assign[=] <ast.IfExp object at 0x7da20c6a88e0> return[call[name[self]._sender.enqueue_event, parameter[name[formatted_event], name[should_block]]]]
keyword[def] identifier[report] ( identifier[self] , identifier[event] , identifier[metadata] = keyword[None] , identifier[block] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[_sender] . identifier[is_terminated] : identifier[self] . identifier[_notify] ( identifier[logging] . identifier[ERROR] , identifier[consts] . identifier[LOG_MSG_REPORT_AFTER_TERMINATION] ) keyword[return] keyword[False] keyword[if] identifier[isinstance] ( identifier[event] ,( identifier[dict] ,)+ identifier[py2to3] . identifier[basestring] ): identifier[formatted_event] = identifier[self] . identifier[_format_event] ( identifier[event] , identifier[metadata] ) identifier[should_block] = identifier[block] keyword[if] identifier[block] keyword[is] keyword[not] keyword[None] keyword[else] identifier[self] . identifier[is_blocking] keyword[return] identifier[self] . identifier[_sender] . identifier[enqueue_event] ( identifier[formatted_event] , identifier[should_block] ) keyword[else] : identifier[error_message] =( identifier[consts] . identifier[LOG_MSG_BAD_EVENT] %( identifier[type] ( identifier[event] ), identifier[event] )) identifier[self] . identifier[_notify] ( identifier[logging] . identifier[ERROR] , identifier[error_message] ) keyword[return] keyword[False]
def report(self, event, metadata=None, block=None): """ Reports an event to Alooma by formatting it properly and placing it in the buffer to be sent by the Sender instance :param event: A dict / string representing an event :param metadata: (Optional) A dict with extra metadata to be attached to the event :param block: (Optional) If True, the function will block the thread until the event buffer has space for the event. If False, reported events are discarded if the queue is full. Defaults to None, which uses the global `block` parameter given in the `init`. :return: True if the event was successfully enqueued, else False """ # Don't allow reporting if the underlying sender is terminated if self._sender.is_terminated: self._notify(logging.ERROR, consts.LOG_MSG_REPORT_AFTER_TERMINATION) return False # depends on [control=['if'], data=[]] # Send the event to the queue if it is a dict or a string. if isinstance(event, (dict,) + py2to3.basestring): formatted_event = self._format_event(event, metadata) should_block = block if block is not None else self.is_blocking return self._sender.enqueue_event(formatted_event, should_block) # depends on [control=['if'], data=[]] else: # Event is not a dict nor a string. Deny it. error_message = consts.LOG_MSG_BAD_EVENT % (type(event), event) self._notify(logging.ERROR, error_message) return False
def timestamp(self, message="", checkpoint=None, finished=False, raise_error=True): """ Print message, time, and time elapsed, perhaps creating checkpoint. This prints your given message, along with the current time, and time elapsed since the previous timestamp() call. If you specify a HEADING by beginning the message with "###", it surrounds the message with newlines for easier readability in the log file. If a checkpoint is designated, an empty file is created corresponding to the name given. Depending on how this manager's been configured, the value of the checkpoint, and whether this timestamp indicates initiation or completion of a group of pipeline steps, this call may stop the pipeline's execution. :param str message: Message to timestamp. :param str checkpoint: Name of checkpoint; this tends to be something that reflects the processing logic about to be or having just been completed. Provision of an argument to this parameter means that a checkpoint file will be created, facilitating arbitrary starting and stopping point for the pipeline as desired. :param bool finished: Whether this call represents the completion of a conceptual unit of a pipeline's processing :param raise_error: Whether to raise exception if checkpoint or current state indicates that a halt should occur. """ # Halt if the manager's state has been set such that this call # should halt the pipeline. if self.halt_on_next: self.halt(checkpoint, finished, raise_error=raise_error) # Determine action to take with respect to halting if needed. if checkpoint: if finished: # Write the file. self._checkpoint(checkpoint) self.prev_checkpoint = checkpoint self.curr_checkpoint = None else: self.prev_checkpoint = self.curr_checkpoint self.curr_checkpoint = checkpoint self._checkpoint(self.prev_checkpoint) # Handle the two halting conditions. 
if (finished and checkpoint == self.stop_after) or (not finished and checkpoint == self.stop_before): self.halt(checkpoint, finished, raise_error=raise_error) # Determine if we've started executing. elif checkpoint == self.start_point: self._active = True # If this is a prospective checkpoint, set the current checkpoint # accordingly and whether we should halt the pipeline on the # next timestamp call. if not finished and checkpoint == self.stop_after: self.halt_on_next = True elapsed = self.time_elapsed(self.last_timestamp) t = time.strftime("%m-%d %H:%M:%S") if checkpoint is None: msg = "{m} ({t}) elapsed: {delta_t} _TIME_".\ format(m=message, t=t, delta_t=elapsed) else: msg = "{m} ({t}) ({status} {stage}) elapsed: {delta_t} _TIME_".\ format(m=message, t=t, status="finished" if finished else "starting", stage=checkpoint, delta_t=elapsed) if re.match("^###", message): msg = "\n{}\n".format(msg) print(msg) self.last_timestamp = time.time()
def function[timestamp, parameter[self, message, checkpoint, finished, raise_error]]: constant[ Print message, time, and time elapsed, perhaps creating checkpoint. This prints your given message, along with the current time, and time elapsed since the previous timestamp() call. If you specify a HEADING by beginning the message with "###", it surrounds the message with newlines for easier readability in the log file. If a checkpoint is designated, an empty file is created corresponding to the name given. Depending on how this manager's been configured, the value of the checkpoint, and whether this timestamp indicates initiation or completion of a group of pipeline steps, this call may stop the pipeline's execution. :param str message: Message to timestamp. :param str checkpoint: Name of checkpoint; this tends to be something that reflects the processing logic about to be or having just been completed. Provision of an argument to this parameter means that a checkpoint file will be created, facilitating arbitrary starting and stopping point for the pipeline as desired. :param bool finished: Whether this call represents the completion of a conceptual unit of a pipeline's processing :param raise_error: Whether to raise exception if checkpoint or current state indicates that a halt should occur. 
] if name[self].halt_on_next begin[:] call[name[self].halt, parameter[name[checkpoint], name[finished]]] if name[checkpoint] begin[:] if name[finished] begin[:] call[name[self]._checkpoint, parameter[name[checkpoint]]] name[self].prev_checkpoint assign[=] name[checkpoint] name[self].curr_checkpoint assign[=] constant[None] if <ast.BoolOp object at 0x7da1b03290c0> begin[:] call[name[self].halt, parameter[name[checkpoint], name[finished]]] if <ast.BoolOp object at 0x7da1b03296c0> begin[:] name[self].halt_on_next assign[=] constant[True] variable[elapsed] assign[=] call[name[self].time_elapsed, parameter[name[self].last_timestamp]] variable[t] assign[=] call[name[time].strftime, parameter[constant[%m-%d %H:%M:%S]]] if compare[name[checkpoint] is constant[None]] begin[:] variable[msg] assign[=] call[constant[{m} ({t}) elapsed: {delta_t} _TIME_].format, parameter[]] if call[name[re].match, parameter[constant[^###], name[message]]] begin[:] variable[msg] assign[=] call[constant[ {} ].format, parameter[name[msg]]] call[name[print], parameter[name[msg]]] name[self].last_timestamp assign[=] call[name[time].time, parameter[]]
keyword[def] identifier[timestamp] ( identifier[self] , identifier[message] = literal[string] , identifier[checkpoint] = keyword[None] , identifier[finished] = keyword[False] , identifier[raise_error] = keyword[True] ): literal[string] keyword[if] identifier[self] . identifier[halt_on_next] : identifier[self] . identifier[halt] ( identifier[checkpoint] , identifier[finished] , identifier[raise_error] = identifier[raise_error] ) keyword[if] identifier[checkpoint] : keyword[if] identifier[finished] : identifier[self] . identifier[_checkpoint] ( identifier[checkpoint] ) identifier[self] . identifier[prev_checkpoint] = identifier[checkpoint] identifier[self] . identifier[curr_checkpoint] = keyword[None] keyword[else] : identifier[self] . identifier[prev_checkpoint] = identifier[self] . identifier[curr_checkpoint] identifier[self] . identifier[curr_checkpoint] = identifier[checkpoint] identifier[self] . identifier[_checkpoint] ( identifier[self] . identifier[prev_checkpoint] ) keyword[if] ( identifier[finished] keyword[and] identifier[checkpoint] == identifier[self] . identifier[stop_after] ) keyword[or] ( keyword[not] identifier[finished] keyword[and] identifier[checkpoint] == identifier[self] . identifier[stop_before] ): identifier[self] . identifier[halt] ( identifier[checkpoint] , identifier[finished] , identifier[raise_error] = identifier[raise_error] ) keyword[elif] identifier[checkpoint] == identifier[self] . identifier[start_point] : identifier[self] . identifier[_active] = keyword[True] keyword[if] keyword[not] identifier[finished] keyword[and] identifier[checkpoint] == identifier[self] . identifier[stop_after] : identifier[self] . identifier[halt_on_next] = keyword[True] identifier[elapsed] = identifier[self] . identifier[time_elapsed] ( identifier[self] . identifier[last_timestamp] ) identifier[t] = identifier[time] . identifier[strftime] ( literal[string] ) keyword[if] identifier[checkpoint] keyword[is] keyword[None] : identifier[msg] = literal[string] . 
identifier[format] ( identifier[m] = identifier[message] , identifier[t] = identifier[t] , identifier[delta_t] = identifier[elapsed] ) keyword[else] : identifier[msg] = literal[string] . identifier[format] ( identifier[m] = identifier[message] , identifier[t] = identifier[t] , identifier[status] = literal[string] keyword[if] identifier[finished] keyword[else] literal[string] , identifier[stage] = identifier[checkpoint] , identifier[delta_t] = identifier[elapsed] ) keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[message] ): identifier[msg] = literal[string] . identifier[format] ( identifier[msg] ) identifier[print] ( identifier[msg] ) identifier[self] . identifier[last_timestamp] = identifier[time] . identifier[time] ()
def timestamp(self, message='', checkpoint=None, finished=False, raise_error=True): """ Print message, time, and time elapsed, perhaps creating checkpoint. This prints your given message, along with the current time, and time elapsed since the previous timestamp() call. If you specify a HEADING by beginning the message with "###", it surrounds the message with newlines for easier readability in the log file. If a checkpoint is designated, an empty file is created corresponding to the name given. Depending on how this manager's been configured, the value of the checkpoint, and whether this timestamp indicates initiation or completion of a group of pipeline steps, this call may stop the pipeline's execution. :param str message: Message to timestamp. :param str checkpoint: Name of checkpoint; this tends to be something that reflects the processing logic about to be or having just been completed. Provision of an argument to this parameter means that a checkpoint file will be created, facilitating arbitrary starting and stopping point for the pipeline as desired. :param bool finished: Whether this call represents the completion of a conceptual unit of a pipeline's processing :param raise_error: Whether to raise exception if checkpoint or current state indicates that a halt should occur. """ # Halt if the manager's state has been set such that this call # should halt the pipeline. if self.halt_on_next: self.halt(checkpoint, finished, raise_error=raise_error) # depends on [control=['if'], data=[]] # Determine action to take with respect to halting if needed. if checkpoint: if finished: # Write the file. self._checkpoint(checkpoint) self.prev_checkpoint = checkpoint self.curr_checkpoint = None # depends on [control=['if'], data=[]] else: self.prev_checkpoint = self.curr_checkpoint self.curr_checkpoint = checkpoint self._checkpoint(self.prev_checkpoint) # Handle the two halting conditions. 
if finished and checkpoint == self.stop_after or (not finished and checkpoint == self.stop_before): self.halt(checkpoint, finished, raise_error=raise_error) # depends on [control=['if'], data=[]] # Determine if we've started executing. elif checkpoint == self.start_point: self._active = True # depends on [control=['if'], data=[]] # If this is a prospective checkpoint, set the current checkpoint # accordingly and whether we should halt the pipeline on the # next timestamp call. if not finished and checkpoint == self.stop_after: self.halt_on_next = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elapsed = self.time_elapsed(self.last_timestamp) t = time.strftime('%m-%d %H:%M:%S') if checkpoint is None: msg = '{m} ({t}) elapsed: {delta_t} _TIME_'.format(m=message, t=t, delta_t=elapsed) # depends on [control=['if'], data=[]] else: msg = '{m} ({t}) ({status} {stage}) elapsed: {delta_t} _TIME_'.format(m=message, t=t, status='finished' if finished else 'starting', stage=checkpoint, delta_t=elapsed) if re.match('^###', message): msg = '\n{}\n'.format(msg) # depends on [control=['if'], data=[]] print(msg) self.last_timestamp = time.time()
def send_file_to_remote(dev, src_file, dst_filename, filesize, dst_mode='wb'):
    """Intended to be passed to the `remote` function as the xfer_func
    argument. Matches up with recv_file_from_host.

    :param dev: Device handle with ``read``/``write``/``timeout``.
    :param src_file: Open local file object to read the payload from.
    :param dst_filename: Destination filename (unused here; part of the
        shared xfer_func signature consumed by the remote side).
    :param filesize: Number of bytes to transfer.
    :param dst_mode: Destination open mode (unused here; part of the
        shared xfer_func signature).
    """
    bytes_remaining = filesize
    save_timeout = dev.timeout
    dev.timeout = 1
    # The chunk size never changes during the transfer, so compute it
    # once instead of re-testing HAS_BUFFER on every iteration. Without
    # a binary-capable buffer the payload is hex-encoded, which doubles
    # its size, so halve the chunk to stay within BUFFER_SIZE.
    if HAS_BUFFER:
        buf_size = BUFFER_SIZE
    else:
        buf_size = BUFFER_SIZE // 2
    while bytes_remaining > 0:
        # Wait for ack so we don't get too far ahead of the remote
        ack = dev.read(1)
        if ack is None or ack != b'\x06':
            sys.stderr.write("timed out or error in transfer to remote\n")
            sys.exit(2)
        read_size = min(bytes_remaining, buf_size)
        buf = src_file.read(read_size)
        #sys.stdout.write('\r%d/%d' % (filesize - bytes_remaining, filesize))
        #sys.stdout.flush()
        if HAS_BUFFER:
            dev.write(buf)
        else:
            dev.write(binascii.hexlify(buf))
        bytes_remaining -= read_size
    #sys.stdout.write('\r')
    dev.timeout = save_timeout
def function[send_file_to_remote, parameter[dev, src_file, dst_filename, filesize, dst_mode]]: constant[Intended to be passed to the `remote` function as the xfer_func argument. Matches up with recv_file_from_host. ] variable[bytes_remaining] assign[=] name[filesize] variable[save_timeout] assign[=] name[dev].timeout name[dev].timeout assign[=] constant[1] while compare[name[bytes_remaining] greater[>] constant[0]] begin[:] variable[ack] assign[=] call[name[dev].read, parameter[constant[1]]] if <ast.BoolOp object at 0x7da20c6c6e60> begin[:] call[name[sys].stderr.write, parameter[constant[timed out or error in transfer to remote ]]] call[name[sys].exit, parameter[constant[2]]] if name[HAS_BUFFER] begin[:] variable[buf_size] assign[=] name[BUFFER_SIZE] variable[read_size] assign[=] call[name[min], parameter[name[bytes_remaining], name[buf_size]]] variable[buf] assign[=] call[name[src_file].read, parameter[name[read_size]]] if name[HAS_BUFFER] begin[:] call[name[dev].write, parameter[name[buf]]] <ast.AugAssign object at 0x7da20c6c5b40> name[dev].timeout assign[=] name[save_timeout]
keyword[def] identifier[send_file_to_remote] ( identifier[dev] , identifier[src_file] , identifier[dst_filename] , identifier[filesize] , identifier[dst_mode] = literal[string] ): literal[string] identifier[bytes_remaining] = identifier[filesize] identifier[save_timeout] = identifier[dev] . identifier[timeout] identifier[dev] . identifier[timeout] = literal[int] keyword[while] identifier[bytes_remaining] > literal[int] : identifier[ack] = identifier[dev] . identifier[read] ( literal[int] ) keyword[if] identifier[ack] keyword[is] keyword[None] keyword[or] identifier[ack] != literal[string] : identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] ) identifier[sys] . identifier[exit] ( literal[int] ) keyword[if] identifier[HAS_BUFFER] : identifier[buf_size] = identifier[BUFFER_SIZE] keyword[else] : identifier[buf_size] = identifier[BUFFER_SIZE] // literal[int] identifier[read_size] = identifier[min] ( identifier[bytes_remaining] , identifier[buf_size] ) identifier[buf] = identifier[src_file] . identifier[read] ( identifier[read_size] ) keyword[if] identifier[HAS_BUFFER] : identifier[dev] . identifier[write] ( identifier[buf] ) keyword[else] : identifier[dev] . identifier[write] ( identifier[binascii] . identifier[hexlify] ( identifier[buf] )) identifier[bytes_remaining] -= identifier[read_size] identifier[dev] . identifier[timeout] = identifier[save_timeout]
def send_file_to_remote(dev, src_file, dst_filename, filesize, dst_mode='wb'): """Intended to be passed to the `remote` function as the xfer_func argument. Matches up with recv_file_from_host. """ bytes_remaining = filesize save_timeout = dev.timeout dev.timeout = 1 while bytes_remaining > 0: # Wait for ack so we don't get too far ahead of the remote ack = dev.read(1) if ack is None or ack != b'\x06': sys.stderr.write('timed out or error in transfer to remote\n') sys.exit(2) # depends on [control=['if'], data=[]] if HAS_BUFFER: buf_size = BUFFER_SIZE # depends on [control=['if'], data=[]] else: buf_size = BUFFER_SIZE // 2 read_size = min(bytes_remaining, buf_size) buf = src_file.read(read_size) #sys.stdout.write('\r%d/%d' % (filesize - bytes_remaining, filesize)) #sys.stdout.flush() if HAS_BUFFER: dev.write(buf) # depends on [control=['if'], data=[]] else: dev.write(binascii.hexlify(buf)) bytes_remaining -= read_size # depends on [control=['while'], data=['bytes_remaining']] #sys.stdout.write('\r') dev.timeout = save_timeout
def dist(self, point, exponent=2.0):
    """Return the distance of ``point`` to this set.

    Parameters
    ----------
    point : `array-like` or float
        Point whose distance to calculate. Its length must be equal
        to the set's dimension. Can be a float in the 1d case.
    exponent : non-zero float or ``float('inf')``, optional
        Exponent of the norm used in the distance calculation.

    Returns
    -------
    dist : float
        Distance to the interior of the IntervalProd.
        Points strictly inside have distance ``0.0``, points with
        ``NaN`` have distance ``float('inf')``.

    See Also
    --------
    numpy.linalg.norm : norm used to compute the distance

    Examples
    --------
    >>> min_pt, max_pt = [-1, 0, 2], [-0.5, 0, 3]
    >>> rbox = IntervalProd(min_pt, max_pt)
    >>> rbox.dist([-5, 3, 2])
    5.0
    >>> rbox.dist([-5, 3, 2], exponent=float('inf'))
    4.0
    """
    point = np.atleast_1d(point)
    if len(point) != self.ndim:
        raise ValueError('`point` must have length {}, got {}'
                         ''.format(self.ndim, len(point)))

    # Any NaN coordinate makes the point infinitely far away.
    if np.any(np.isnan(point)):
        return float('inf')

    # Index tuples of the coordinates lying above / below the box.
    above = np.where(point > self.max_pt)
    below = np.where(point < self.min_pt)

    # Access [0] since np.where returns a tuple. No violated
    # coordinate means the point lies inside (distance zero).
    if len(above[0]) == 0 and len(below[0]) == 0:
        return 0.0

    # Distance is the norm of the offsets from the violated
    # coordinates to the nearest box boundary in each of them.
    outside_coords = np.concatenate((point[above], point[below]))
    boundary_coords = np.concatenate((self.max_pt[above],
                                      self.min_pt[below]))
    return np.linalg.norm(outside_coords - boundary_coords, ord=exponent)
def _start_process(self, classpath): """Given a classpath prepared for running ENSIME, spawns a server process in a way that is otherwise agnostic to how the strategy installs ENSIME. Args: classpath (list of str): list of paths to jars or directories (Within this function the list is joined with a system dependent path separator to create a single string argument suitable to pass to ``java -cp`` as a classpath) Returns: EnsimeProcess: A process handle for the launched server. """ cache_dir = self.config['cache-dir'] java_flags = self.config['java-flags'] iswindows = os.name == 'nt' Util.mkdir_p(cache_dir) log_path = os.path.join(cache_dir, "server.log") log = open(log_path, "w") null = open(os.devnull, "r") java = os.path.join(self.config['java-home'], 'bin', 'java.exe' if iswindows else 'java') if not os.path.exists(java): raise InvalidJavaPathError(errno.ENOENT, 'No such file or directory', java) elif not os.access(java, os.X_OK): raise InvalidJavaPathError(errno.EACCES, 'Permission denied', java) args = ( [java, "-cp", (';' if iswindows else ':').join(classpath)] + [a for a in java_flags if a] + ["-Densime.config={}".format(self.config.filepath), "org.ensime.server.Server"]) process = subprocess.Popen( args, stdin=null, stdout=log, stderr=subprocess.STDOUT) pid_path = os.path.join(cache_dir, "server.pid") Util.write_file(pid_path, str(process.pid)) def on_stop(): log.close() null.close() with catch(Exception): os.remove(pid_path) return EnsimeProcess(cache_dir, process, log_path, on_stop)
def function[_start_process, parameter[self, classpath]]: constant[Given a classpath prepared for running ENSIME, spawns a server process in a way that is otherwise agnostic to how the strategy installs ENSIME. Args: classpath (list of str): list of paths to jars or directories (Within this function the list is joined with a system dependent path separator to create a single string argument suitable to pass to ``java -cp`` as a classpath) Returns: EnsimeProcess: A process handle for the launched server. ] variable[cache_dir] assign[=] call[name[self].config][constant[cache-dir]] variable[java_flags] assign[=] call[name[self].config][constant[java-flags]] variable[iswindows] assign[=] compare[name[os].name equal[==] constant[nt]] call[name[Util].mkdir_p, parameter[name[cache_dir]]] variable[log_path] assign[=] call[name[os].path.join, parameter[name[cache_dir], constant[server.log]]] variable[log] assign[=] call[name[open], parameter[name[log_path], constant[w]]] variable[null] assign[=] call[name[open], parameter[name[os].devnull, constant[r]]] variable[java] assign[=] call[name[os].path.join, parameter[call[name[self].config][constant[java-home]], constant[bin], <ast.IfExp object at 0x7da207f03e20>]] if <ast.UnaryOp object at 0x7da207f01b70> begin[:] <ast.Raise object at 0x7da207f026e0> variable[args] assign[=] binary_operation[binary_operation[list[[<ast.Name object at 0x7da207f01390>, <ast.Constant object at 0x7da207f011b0>, <ast.Call object at 0x7da207f03610>]] + <ast.ListComp object at 0x7da207f00670>] + list[[<ast.Call object at 0x7da207f03490>, <ast.Constant object at 0x7da207f039d0>]]] variable[process] assign[=] call[name[subprocess].Popen, parameter[name[args]]] variable[pid_path] assign[=] call[name[os].path.join, parameter[name[cache_dir], constant[server.pid]]] call[name[Util].write_file, parameter[name[pid_path], call[name[str], parameter[name[process].pid]]]] def function[on_stop, parameter[]]: call[name[log].close, parameter[]] 
call[name[null].close, parameter[]] with call[name[catch], parameter[name[Exception]]] begin[:] call[name[os].remove, parameter[name[pid_path]]] return[call[name[EnsimeProcess], parameter[name[cache_dir], name[process], name[log_path], name[on_stop]]]]
keyword[def] identifier[_start_process] ( identifier[self] , identifier[classpath] ): literal[string] identifier[cache_dir] = identifier[self] . identifier[config] [ literal[string] ] identifier[java_flags] = identifier[self] . identifier[config] [ literal[string] ] identifier[iswindows] = identifier[os] . identifier[name] == literal[string] identifier[Util] . identifier[mkdir_p] ( identifier[cache_dir] ) identifier[log_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[cache_dir] , literal[string] ) identifier[log] = identifier[open] ( identifier[log_path] , literal[string] ) identifier[null] = identifier[open] ( identifier[os] . identifier[devnull] , literal[string] ) identifier[java] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[config] [ literal[string] ], literal[string] , literal[string] keyword[if] identifier[iswindows] keyword[else] literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[java] ): keyword[raise] identifier[InvalidJavaPathError] ( identifier[errno] . identifier[ENOENT] , literal[string] , identifier[java] ) keyword[elif] keyword[not] identifier[os] . identifier[access] ( identifier[java] , identifier[os] . identifier[X_OK] ): keyword[raise] identifier[InvalidJavaPathError] ( identifier[errno] . identifier[EACCES] , literal[string] , identifier[java] ) identifier[args] =( [ identifier[java] , literal[string] ,( literal[string] keyword[if] identifier[iswindows] keyword[else] literal[string] ). identifier[join] ( identifier[classpath] )]+ [ identifier[a] keyword[for] identifier[a] keyword[in] identifier[java_flags] keyword[if] identifier[a] ]+ [ literal[string] . identifier[format] ( identifier[self] . identifier[config] . identifier[filepath] ), literal[string] ]) identifier[process] = identifier[subprocess] . 
identifier[Popen] ( identifier[args] , identifier[stdin] = identifier[null] , identifier[stdout] = identifier[log] , identifier[stderr] = identifier[subprocess] . identifier[STDOUT] ) identifier[pid_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[cache_dir] , literal[string] ) identifier[Util] . identifier[write_file] ( identifier[pid_path] , identifier[str] ( identifier[process] . identifier[pid] )) keyword[def] identifier[on_stop] (): identifier[log] . identifier[close] () identifier[null] . identifier[close] () keyword[with] identifier[catch] ( identifier[Exception] ): identifier[os] . identifier[remove] ( identifier[pid_path] ) keyword[return] identifier[EnsimeProcess] ( identifier[cache_dir] , identifier[process] , identifier[log_path] , identifier[on_stop] )
def _start_process(self, classpath): """Given a classpath prepared for running ENSIME, spawns a server process in a way that is otherwise agnostic to how the strategy installs ENSIME. Args: classpath (list of str): list of paths to jars or directories (Within this function the list is joined with a system dependent path separator to create a single string argument suitable to pass to ``java -cp`` as a classpath) Returns: EnsimeProcess: A process handle for the launched server. """ cache_dir = self.config['cache-dir'] java_flags = self.config['java-flags'] iswindows = os.name == 'nt' Util.mkdir_p(cache_dir) log_path = os.path.join(cache_dir, 'server.log') log = open(log_path, 'w') null = open(os.devnull, 'r') java = os.path.join(self.config['java-home'], 'bin', 'java.exe' if iswindows else 'java') if not os.path.exists(java): raise InvalidJavaPathError(errno.ENOENT, 'No such file or directory', java) # depends on [control=['if'], data=[]] elif not os.access(java, os.X_OK): raise InvalidJavaPathError(errno.EACCES, 'Permission denied', java) # depends on [control=['if'], data=[]] args = [java, '-cp', (';' if iswindows else ':').join(classpath)] + [a for a in java_flags if a] + ['-Densime.config={}'.format(self.config.filepath), 'org.ensime.server.Server'] process = subprocess.Popen(args, stdin=null, stdout=log, stderr=subprocess.STDOUT) pid_path = os.path.join(cache_dir, 'server.pid') Util.write_file(pid_path, str(process.pid)) def on_stop(): log.close() null.close() with catch(Exception): os.remove(pid_path) # depends on [control=['with'], data=[]] return EnsimeProcess(cache_dir, process, log_path, on_stop)
def _create_token_set(self): """Creates a token set of all tokens in the index using `lunr.TokenSet` """ self.token_set = TokenSet.from_list(sorted(list(self.inverted_index.keys())))
def function[_create_token_set, parameter[self]]: constant[Creates a token set of all tokens in the index using `lunr.TokenSet` ] name[self].token_set assign[=] call[name[TokenSet].from_list, parameter[call[name[sorted], parameter[call[name[list], parameter[call[name[self].inverted_index.keys, parameter[]]]]]]]]
keyword[def] identifier[_create_token_set] ( identifier[self] ): literal[string] identifier[self] . identifier[token_set] = identifier[TokenSet] . identifier[from_list] ( identifier[sorted] ( identifier[list] ( identifier[self] . identifier[inverted_index] . identifier[keys] ())))
def _create_token_set(self): """Creates a token set of all tokens in the index using `lunr.TokenSet` """ self.token_set = TokenSet.from_list(sorted(list(self.inverted_index.keys())))
def _get_summary_struct(self): """ Returns a structured description of the model, including (where relevant) the schema of the training data, description of the training data, training statistics, and model hyperparameters. Returns ------- sections : list (of list of tuples) A list of summary sections. Each section is a list. Each item in a section list is a tuple of the form: ('<label>','<field>') section_titles: list A list of section titles. The order matches that of the 'sections' object. """ g = self.graph section_titles = ['Graph'] graph_summary = [(k, _precomputed_field(v)) for k, v in six.iteritems(g.summary())] sections = [graph_summary] # collect other sections results = [(k, _precomputed_field(v)) for k, v in six.iteritems(self._result_fields())] methods = [(k, _precomputed_field(v)) for k, v in six.iteritems(self._method_fields())] settings = [(k, v) for k, v in six.iteritems(self._setting_fields())] metrics = [(k, v) for k, v in six.iteritems(self._metric_fields())] optional_sections = [('Results', results), ('Settings', settings), \ ('Metrics', metrics), ('Methods', methods)] # if section is not empty, append to summary structure for (title, section) in optional_sections: if len(section) > 0: section_titles.append(title) sections.append(section) return (sections, section_titles)
def function[_get_summary_struct, parameter[self]]: constant[ Returns a structured description of the model, including (where relevant) the schema of the training data, description of the training data, training statistics, and model hyperparameters. Returns ------- sections : list (of list of tuples) A list of summary sections. Each section is a list. Each item in a section list is a tuple of the form: ('<label>','<field>') section_titles: list A list of section titles. The order matches that of the 'sections' object. ] variable[g] assign[=] name[self].graph variable[section_titles] assign[=] list[[<ast.Constant object at 0x7da1b1f75690>]] variable[graph_summary] assign[=] <ast.ListComp object at 0x7da1b1f75c30> variable[sections] assign[=] list[[<ast.Name object at 0x7da1b1f77370>]] variable[results] assign[=] <ast.ListComp object at 0x7da1b1f74640> variable[methods] assign[=] <ast.ListComp object at 0x7da204961a50> variable[settings] assign[=] <ast.ListComp object at 0x7da1b208c130> variable[metrics] assign[=] <ast.ListComp object at 0x7da1b208c670> variable[optional_sections] assign[=] list[[<ast.Tuple object at 0x7da1b208c2e0>, <ast.Tuple object at 0x7da1b2097430>, <ast.Tuple object at 0x7da1b2096e00>, <ast.Tuple object at 0x7da1b2097ee0>]] for taget[tuple[[<ast.Name object at 0x7da1b2097820>, <ast.Name object at 0x7da1b2096cb0>]]] in starred[name[optional_sections]] begin[:] if compare[call[name[len], parameter[name[section]]] greater[>] constant[0]] begin[:] call[name[section_titles].append, parameter[name[title]]] call[name[sections].append, parameter[name[section]]] return[tuple[[<ast.Name object at 0x7da1b2097c10>, <ast.Name object at 0x7da1b2097670>]]]
keyword[def] identifier[_get_summary_struct] ( identifier[self] ): literal[string] identifier[g] = identifier[self] . identifier[graph] identifier[section_titles] =[ literal[string] ] identifier[graph_summary] =[( identifier[k] , identifier[_precomputed_field] ( identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[g] . identifier[summary] ())] identifier[sections] =[ identifier[graph_summary] ] identifier[results] =[( identifier[k] , identifier[_precomputed_field] ( identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[self] . identifier[_result_fields] ())] identifier[methods] =[( identifier[k] , identifier[_precomputed_field] ( identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[self] . identifier[_method_fields] ())] identifier[settings] =[( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[self] . identifier[_setting_fields] ())] identifier[metrics] =[( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[self] . identifier[_metric_fields] ())] identifier[optional_sections] =[( literal[string] , identifier[results] ),( literal[string] , identifier[settings] ),( literal[string] , identifier[metrics] ),( literal[string] , identifier[methods] )] keyword[for] ( identifier[title] , identifier[section] ) keyword[in] identifier[optional_sections] : keyword[if] identifier[len] ( identifier[section] )> literal[int] : identifier[section_titles] . identifier[append] ( identifier[title] ) identifier[sections] . identifier[append] ( identifier[section] ) keyword[return] ( identifier[sections] , identifier[section_titles] )
def _get_summary_struct(self): """ Returns a structured description of the model, including (where relevant) the schema of the training data, description of the training data, training statistics, and model hyperparameters. Returns ------- sections : list (of list of tuples) A list of summary sections. Each section is a list. Each item in a section list is a tuple of the form: ('<label>','<field>') section_titles: list A list of section titles. The order matches that of the 'sections' object. """ g = self.graph section_titles = ['Graph'] graph_summary = [(k, _precomputed_field(v)) for (k, v) in six.iteritems(g.summary())] sections = [graph_summary] # collect other sections results = [(k, _precomputed_field(v)) for (k, v) in six.iteritems(self._result_fields())] methods = [(k, _precomputed_field(v)) for (k, v) in six.iteritems(self._method_fields())] settings = [(k, v) for (k, v) in six.iteritems(self._setting_fields())] metrics = [(k, v) for (k, v) in six.iteritems(self._metric_fields())] optional_sections = [('Results', results), ('Settings', settings), ('Metrics', metrics), ('Methods', methods)] # if section is not empty, append to summary structure for (title, section) in optional_sections: if len(section) > 0: section_titles.append(title) sections.append(section) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return (sections, section_titles)
def mode(name, mode, quotatype): ''' Set the quota for the system name The filesystem to set the quota mode on mode Whether the quota system is on or off quotatype Must be ``user`` or ``group`` ''' ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''} fun = 'off' if mode is True: fun = 'on' if __salt__['quota.get_mode'](name)[name][quotatype] == fun: ret['result'] = True ret['comment'] = 'Quota for {0} already set to {1}'.format(name, fun) return ret if __opts__['test']: ret['comment'] = 'Quota for {0} needs to be set to {1}'.format(name, fun) return ret if __salt__['quota.{0}'.format(fun)](name): ret['changes'] = {'quota': name} ret['result'] = True ret['comment'] = 'Set quota for {0} to {1}'.format(name, fun) return ret else: ret['result'] = False ret['comment'] = 'Failed to set quota for {0} to {1}'.format(name, fun) return ret
def function[mode, parameter[name, mode, quotatype]]: constant[ Set the quota for the system name The filesystem to set the quota mode on mode Whether the quota system is on or off quotatype Must be ``user`` or ``group`` ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b21ef8e0>, <ast.Constant object at 0x7da1b21ec2e0>, <ast.Constant object at 0x7da1b21ecd30>, <ast.Constant object at 0x7da1b21ee440>], [<ast.Name object at 0x7da1b21ed8d0>, <ast.Dict object at 0x7da1b21ed0f0>, <ast.Constant object at 0x7da1b21ef340>, <ast.Constant object at 0x7da1b21ef580>]] variable[fun] assign[=] constant[off] if compare[name[mode] is constant[True]] begin[:] variable[fun] assign[=] constant[on] if compare[call[call[call[call[name[__salt__]][constant[quota.get_mode]], parameter[name[name]]]][name[name]]][name[quotatype]] equal[==] name[fun]] begin[:] call[name[ret]][constant[result]] assign[=] constant[True] call[name[ret]][constant[comment]] assign[=] call[constant[Quota for {0} already set to {1}].format, parameter[name[name], name[fun]]] return[name[ret]] if call[name[__opts__]][constant[test]] begin[:] call[name[ret]][constant[comment]] assign[=] call[constant[Quota for {0} needs to be set to {1}].format, parameter[name[name], name[fun]]] return[name[ret]] if call[call[name[__salt__]][call[constant[quota.{0}].format, parameter[name[fun]]]], parameter[name[name]]] begin[:] call[name[ret]][constant[changes]] assign[=] dictionary[[<ast.Constant object at 0x7da1b21bc700>], [<ast.Name object at 0x7da1b21bdab0>]] call[name[ret]][constant[result]] assign[=] constant[True] call[name[ret]][constant[comment]] assign[=] call[constant[Set quota for {0} to {1}].format, parameter[name[name], name[fun]]] return[name[ret]]
keyword[def] identifier[mode] ( identifier[name] , identifier[mode] , identifier[quotatype] ): literal[string] identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[None] , literal[string] : literal[string] } identifier[fun] = literal[string] keyword[if] identifier[mode] keyword[is] keyword[True] : identifier[fun] = literal[string] keyword[if] identifier[__salt__] [ literal[string] ]( identifier[name] )[ identifier[name] ][ identifier[quotatype] ]== identifier[fun] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] , identifier[fun] ) keyword[return] identifier[ret] keyword[if] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] , identifier[fun] ) keyword[return] identifier[ret] keyword[if] identifier[__salt__] [ literal[string] . identifier[format] ( identifier[fun] )]( identifier[name] ): identifier[ret] [ literal[string] ]={ literal[string] : identifier[name] } identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] , identifier[fun] ) keyword[return] identifier[ret] keyword[else] : identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] , identifier[fun] ) keyword[return] identifier[ret]
def mode(name, mode, quotatype): """ Set the quota for the system name The filesystem to set the quota mode on mode Whether the quota system is on or off quotatype Must be ``user`` or ``group`` """ ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''} fun = 'off' if mode is True: fun = 'on' # depends on [control=['if'], data=[]] if __salt__['quota.get_mode'](name)[name][quotatype] == fun: ret['result'] = True ret['comment'] = 'Quota for {0} already set to {1}'.format(name, fun) return ret # depends on [control=['if'], data=['fun']] if __opts__['test']: ret['comment'] = 'Quota for {0} needs to be set to {1}'.format(name, fun) return ret # depends on [control=['if'], data=[]] if __salt__['quota.{0}'.format(fun)](name): ret['changes'] = {'quota': name} ret['result'] = True ret['comment'] = 'Set quota for {0} to {1}'.format(name, fun) return ret # depends on [control=['if'], data=[]] else: ret['result'] = False ret['comment'] = 'Failed to set quota for {0} to {1}'.format(name, fun) return ret
def get_snapshot_brok(self, snap_output, exit_status): """ Create snapshot (check_result type) brok :param snap_output: value of output :type snap_output: str :param exit_status: status of exit :type exit_status: integer :return: Brok object :rtype: alignak.Brok """ data = { 'uuid': self.uuid, 'snapshot_output': snap_output, 'snapshot_time': int(time.time()), 'snapshot_exit_status': exit_status, } self.fill_data_brok_from(data, 'check_result') return Brok({'type': self.my_type + '_snapshot', 'data': data})
def function[get_snapshot_brok, parameter[self, snap_output, exit_status]]: constant[ Create snapshot (check_result type) brok :param snap_output: value of output :type snap_output: str :param exit_status: status of exit :type exit_status: integer :return: Brok object :rtype: alignak.Brok ] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18f58d660>, <ast.Constant object at 0x7da18f58f970>, <ast.Constant object at 0x7da18f58dd50>, <ast.Constant object at 0x7da18f58e170>], [<ast.Attribute object at 0x7da18f58ecb0>, <ast.Name object at 0x7da18f58d4e0>, <ast.Call object at 0x7da18f58c6a0>, <ast.Name object at 0x7da18f58c520>]] call[name[self].fill_data_brok_from, parameter[name[data], constant[check_result]]] return[call[name[Brok], parameter[dictionary[[<ast.Constant object at 0x7da18bc70d00>, <ast.Constant object at 0x7da18bc71210>], [<ast.BinOp object at 0x7da18bc738e0>, <ast.Name object at 0x7da18bc709a0>]]]]]
keyword[def] identifier[get_snapshot_brok] ( identifier[self] , identifier[snap_output] , identifier[exit_status] ): literal[string] identifier[data] ={ literal[string] : identifier[self] . identifier[uuid] , literal[string] : identifier[snap_output] , literal[string] : identifier[int] ( identifier[time] . identifier[time] ()), literal[string] : identifier[exit_status] , } identifier[self] . identifier[fill_data_brok_from] ( identifier[data] , literal[string] ) keyword[return] identifier[Brok] ({ literal[string] : identifier[self] . identifier[my_type] + literal[string] , literal[string] : identifier[data] })
def get_snapshot_brok(self, snap_output, exit_status): """ Create snapshot (check_result type) brok :param snap_output: value of output :type snap_output: str :param exit_status: status of exit :type exit_status: integer :return: Brok object :rtype: alignak.Brok """ data = {'uuid': self.uuid, 'snapshot_output': snap_output, 'snapshot_time': int(time.time()), 'snapshot_exit_status': exit_status} self.fill_data_brok_from(data, 'check_result') return Brok({'type': self.my_type + '_snapshot', 'data': data})
def process_slo(self, keep_local_session=False, request_id=None, delete_session_cb=None): """ Process the SAML Logout Response / Logout Request sent by the IdP. :param keep_local_session: When false will destroy the local session, otherwise will destroy it :type keep_local_session: bool :param request_id: The ID of the LogoutRequest sent by this SP to the IdP :type request_id: string :returns: Redirection url """ self.__errors = [] self.__error_reason = None get_data = 'get_data' in self.__request_data and self.__request_data['get_data'] if get_data and 'SAMLResponse' in get_data: logout_response = OneLogin_Saml2_Logout_Response(self.__settings, get_data['SAMLResponse']) self.__last_response = logout_response.get_xml() if not self.validate_response_signature(get_data): self.__errors.append('invalid_logout_response_signature') self.__errors.append('Signature validation failed. Logout Response rejected') elif not logout_response.is_valid(self.__request_data, request_id): self.__errors.append('invalid_logout_response') self.__error_reason = logout_response.get_error() elif logout_response.get_status() != OneLogin_Saml2_Constants.STATUS_SUCCESS: self.__errors.append('logout_not_success') else: self.__last_message_id = logout_response.id if not keep_local_session: OneLogin_Saml2_Utils.delete_local_session(delete_session_cb) elif get_data and 'SAMLRequest' in get_data: logout_request = OneLogin_Saml2_Logout_Request(self.__settings, get_data['SAMLRequest']) self.__last_request = logout_request.get_xml() if not self.validate_request_signature(get_data): self.__errors.append("invalid_logout_request_signature") self.__errors.append('Signature validation failed. 
Logout Request rejected') elif not logout_request.is_valid(self.__request_data): self.__errors.append('invalid_logout_request') self.__error_reason = logout_request.get_error() else: if not keep_local_session: OneLogin_Saml2_Utils.delete_local_session(delete_session_cb) in_response_to = logout_request.id self.__last_message_id = logout_request.id response_builder = OneLogin_Saml2_Logout_Response(self.__settings) response_builder.build(in_response_to) self.__last_response = response_builder.get_xml() logout_response = response_builder.get_response() parameters = {'SAMLResponse': logout_response} if 'RelayState' in self.__request_data['get_data']: parameters['RelayState'] = self.__request_data['get_data']['RelayState'] security = self.__settings.get_security_data() if security['logoutResponseSigned']: self.add_response_signature(parameters, security['signatureAlgorithm']) return self.redirect_to(self.get_slo_url(), parameters) else: self.__errors.append('invalid_binding') raise OneLogin_Saml2_Error( 'SAML LogoutRequest/LogoutResponse not found. Only supported HTTP_REDIRECT Binding', OneLogin_Saml2_Error.SAML_LOGOUTMESSAGE_NOT_FOUND )
def function[process_slo, parameter[self, keep_local_session, request_id, delete_session_cb]]: constant[ Process the SAML Logout Response / Logout Request sent by the IdP. :param keep_local_session: When false will destroy the local session, otherwise will destroy it :type keep_local_session: bool :param request_id: The ID of the LogoutRequest sent by this SP to the IdP :type request_id: string :returns: Redirection url ] name[self].__errors assign[=] list[[]] name[self].__error_reason assign[=] constant[None] variable[get_data] assign[=] <ast.BoolOp object at 0x7da1b170de70> if <ast.BoolOp object at 0x7da1b170c1c0> begin[:] variable[logout_response] assign[=] call[name[OneLogin_Saml2_Logout_Response], parameter[name[self].__settings, call[name[get_data]][constant[SAMLResponse]]]] name[self].__last_response assign[=] call[name[logout_response].get_xml, parameter[]] if <ast.UnaryOp object at 0x7da1b170f2b0> begin[:] call[name[self].__errors.append, parameter[constant[invalid_logout_response_signature]]] call[name[self].__errors.append, parameter[constant[Signature validation failed. Logout Response rejected]]]
keyword[def] identifier[process_slo] ( identifier[self] , identifier[keep_local_session] = keyword[False] , identifier[request_id] = keyword[None] , identifier[delete_session_cb] = keyword[None] ): literal[string] identifier[self] . identifier[__errors] =[] identifier[self] . identifier[__error_reason] = keyword[None] identifier[get_data] = literal[string] keyword[in] identifier[self] . identifier[__request_data] keyword[and] identifier[self] . identifier[__request_data] [ literal[string] ] keyword[if] identifier[get_data] keyword[and] literal[string] keyword[in] identifier[get_data] : identifier[logout_response] = identifier[OneLogin_Saml2_Logout_Response] ( identifier[self] . identifier[__settings] , identifier[get_data] [ literal[string] ]) identifier[self] . identifier[__last_response] = identifier[logout_response] . identifier[get_xml] () keyword[if] keyword[not] identifier[self] . identifier[validate_response_signature] ( identifier[get_data] ): identifier[self] . identifier[__errors] . identifier[append] ( literal[string] ) identifier[self] . identifier[__errors] . identifier[append] ( literal[string] ) keyword[elif] keyword[not] identifier[logout_response] . identifier[is_valid] ( identifier[self] . identifier[__request_data] , identifier[request_id] ): identifier[self] . identifier[__errors] . identifier[append] ( literal[string] ) identifier[self] . identifier[__error_reason] = identifier[logout_response] . identifier[get_error] () keyword[elif] identifier[logout_response] . identifier[get_status] ()!= identifier[OneLogin_Saml2_Constants] . identifier[STATUS_SUCCESS] : identifier[self] . identifier[__errors] . identifier[append] ( literal[string] ) keyword[else] : identifier[self] . identifier[__last_message_id] = identifier[logout_response] . identifier[id] keyword[if] keyword[not] identifier[keep_local_session] : identifier[OneLogin_Saml2_Utils] . 
identifier[delete_local_session] ( identifier[delete_session_cb] ) keyword[elif] identifier[get_data] keyword[and] literal[string] keyword[in] identifier[get_data] : identifier[logout_request] = identifier[OneLogin_Saml2_Logout_Request] ( identifier[self] . identifier[__settings] , identifier[get_data] [ literal[string] ]) identifier[self] . identifier[__last_request] = identifier[logout_request] . identifier[get_xml] () keyword[if] keyword[not] identifier[self] . identifier[validate_request_signature] ( identifier[get_data] ): identifier[self] . identifier[__errors] . identifier[append] ( literal[string] ) identifier[self] . identifier[__errors] . identifier[append] ( literal[string] ) keyword[elif] keyword[not] identifier[logout_request] . identifier[is_valid] ( identifier[self] . identifier[__request_data] ): identifier[self] . identifier[__errors] . identifier[append] ( literal[string] ) identifier[self] . identifier[__error_reason] = identifier[logout_request] . identifier[get_error] () keyword[else] : keyword[if] keyword[not] identifier[keep_local_session] : identifier[OneLogin_Saml2_Utils] . identifier[delete_local_session] ( identifier[delete_session_cb] ) identifier[in_response_to] = identifier[logout_request] . identifier[id] identifier[self] . identifier[__last_message_id] = identifier[logout_request] . identifier[id] identifier[response_builder] = identifier[OneLogin_Saml2_Logout_Response] ( identifier[self] . identifier[__settings] ) identifier[response_builder] . identifier[build] ( identifier[in_response_to] ) identifier[self] . identifier[__last_response] = identifier[response_builder] . identifier[get_xml] () identifier[logout_response] = identifier[response_builder] . identifier[get_response] () identifier[parameters] ={ literal[string] : identifier[logout_response] } keyword[if] literal[string] keyword[in] identifier[self] . identifier[__request_data] [ literal[string] ]: identifier[parameters] [ literal[string] ]= identifier[self] . 
identifier[__request_data] [ literal[string] ][ literal[string] ] identifier[security] = identifier[self] . identifier[__settings] . identifier[get_security_data] () keyword[if] identifier[security] [ literal[string] ]: identifier[self] . identifier[add_response_signature] ( identifier[parameters] , identifier[security] [ literal[string] ]) keyword[return] identifier[self] . identifier[redirect_to] ( identifier[self] . identifier[get_slo_url] (), identifier[parameters] ) keyword[else] : identifier[self] . identifier[__errors] . identifier[append] ( literal[string] ) keyword[raise] identifier[OneLogin_Saml2_Error] ( literal[string] , identifier[OneLogin_Saml2_Error] . identifier[SAML_LOGOUTMESSAGE_NOT_FOUND] )
def process_slo(self, keep_local_session=False, request_id=None, delete_session_cb=None): """ Process the SAML Logout Response / Logout Request sent by the IdP. :param keep_local_session: When false will destroy the local session, otherwise will destroy it :type keep_local_session: bool :param request_id: The ID of the LogoutRequest sent by this SP to the IdP :type request_id: string :returns: Redirection url """ self.__errors = [] self.__error_reason = None get_data = 'get_data' in self.__request_data and self.__request_data['get_data'] if get_data and 'SAMLResponse' in get_data: logout_response = OneLogin_Saml2_Logout_Response(self.__settings, get_data['SAMLResponse']) self.__last_response = logout_response.get_xml() if not self.validate_response_signature(get_data): self.__errors.append('invalid_logout_response_signature') self.__errors.append('Signature validation failed. Logout Response rejected') # depends on [control=['if'], data=[]] elif not logout_response.is_valid(self.__request_data, request_id): self.__errors.append('invalid_logout_response') self.__error_reason = logout_response.get_error() # depends on [control=['if'], data=[]] elif logout_response.get_status() != OneLogin_Saml2_Constants.STATUS_SUCCESS: self.__errors.append('logout_not_success') # depends on [control=['if'], data=[]] else: self.__last_message_id = logout_response.id if not keep_local_session: OneLogin_Saml2_Utils.delete_local_session(delete_session_cb) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif get_data and 'SAMLRequest' in get_data: logout_request = OneLogin_Saml2_Logout_Request(self.__settings, get_data['SAMLRequest']) self.__last_request = logout_request.get_xml() if not self.validate_request_signature(get_data): self.__errors.append('invalid_logout_request_signature') self.__errors.append('Signature validation failed. 
Logout Request rejected') # depends on [control=['if'], data=[]] elif not logout_request.is_valid(self.__request_data): self.__errors.append('invalid_logout_request') self.__error_reason = logout_request.get_error() # depends on [control=['if'], data=[]] else: if not keep_local_session: OneLogin_Saml2_Utils.delete_local_session(delete_session_cb) # depends on [control=['if'], data=[]] in_response_to = logout_request.id self.__last_message_id = logout_request.id response_builder = OneLogin_Saml2_Logout_Response(self.__settings) response_builder.build(in_response_to) self.__last_response = response_builder.get_xml() logout_response = response_builder.get_response() parameters = {'SAMLResponse': logout_response} if 'RelayState' in self.__request_data['get_data']: parameters['RelayState'] = self.__request_data['get_data']['RelayState'] # depends on [control=['if'], data=[]] security = self.__settings.get_security_data() if security['logoutResponseSigned']: self.add_response_signature(parameters, security['signatureAlgorithm']) # depends on [control=['if'], data=[]] return self.redirect_to(self.get_slo_url(), parameters) # depends on [control=['if'], data=[]] else: self.__errors.append('invalid_binding') raise OneLogin_Saml2_Error('SAML LogoutRequest/LogoutResponse not found. Only supported HTTP_REDIRECT Binding', OneLogin_Saml2_Error.SAML_LOGOUTMESSAGE_NOT_FOUND)
def encrypt_seal(self, data: Union[str, bytes]) -> bytes:
    """
    Encrypt data with a curve25519 version of the ed25519 public key

    :param data: Bytes data to encrypt
    """
    # The ed25519 signing key cannot be used for box encryption directly;
    # convert the public half to its curve25519 equivalent first.
    recipient_key = libnacl.crypto_sign_ed25519_pk_to_curve25519(self.pk)
    payload = ensure_bytes(data)
    return libnacl.crypto_box_seal(payload, recipient_key)
def function[encrypt_seal, parameter[self, data]]: constant[ Encrypt data with a curve25519 version of the ed25519 public key :param data: Bytes data to encrypt ] variable[curve25519_public_key] assign[=] call[name[libnacl].crypto_sign_ed25519_pk_to_curve25519, parameter[name[self].pk]] return[call[name[libnacl].crypto_box_seal, parameter[call[name[ensure_bytes], parameter[name[data]]], name[curve25519_public_key]]]]
keyword[def] identifier[encrypt_seal] ( identifier[self] , identifier[data] : identifier[Union] [ identifier[str] , identifier[bytes] ])-> identifier[bytes] : literal[string] identifier[curve25519_public_key] = identifier[libnacl] . identifier[crypto_sign_ed25519_pk_to_curve25519] ( identifier[self] . identifier[pk] ) keyword[return] identifier[libnacl] . identifier[crypto_box_seal] ( identifier[ensure_bytes] ( identifier[data] ), identifier[curve25519_public_key] )
def encrypt_seal(self, data: Union[str, bytes]) -> bytes: """ Encrypt data with a curve25519 version of the ed25519 public key :param data: Bytes data to encrypt """ curve25519_public_key = libnacl.crypto_sign_ed25519_pk_to_curve25519(self.pk) return libnacl.crypto_box_seal(ensure_bytes(data), curve25519_public_key)
def add_son_manipulator(self, manipulator):
    """Add a new son manipulator to this database.

    **DEPRECATED** - `add_son_manipulator` is deprecated.

    .. versionchanged:: 3.0
      Deprecated add_son_manipulator.
    """
    warnings.warn("add_son_manipulator is deprecated",
                  DeprecationWarning, stacklevel=2)
    base = SONManipulator()

    def overridden(name):
        """True if `manipulator` overrides SONManipulator's `name` method."""
        return (getattr(manipulator, name).__func__
                != getattr(base, name).__func__)

    # Copying manipulators live in their own lists; pick the pair once.
    if manipulator.will_copy():
        incoming = self.__incoming_copying_manipulators
        outgoing = self.__outgoing_copying_manipulators
    else:
        incoming = self.__incoming_manipulators
        outgoing = self.__outgoing_manipulators

    # Only register the transforms the manipulator actually implements;
    # newest manipulators run first (hence insert at position 0).
    if overridden("transform_incoming"):
        incoming.insert(0, manipulator)
    if overridden("transform_outgoing"):
        outgoing.insert(0, manipulator)
def function[add_son_manipulator, parameter[self, manipulator]]: constant[Add a new son manipulator to this database. **DEPRECATED** - `add_son_manipulator` is deprecated. .. versionchanged:: 3.0 Deprecated add_son_manipulator. ] call[name[warnings].warn, parameter[constant[add_son_manipulator is deprecated], name[DeprecationWarning]]] variable[base] assign[=] call[name[SONManipulator], parameter[]] def function[method_overwritten, parameter[instance, method]]: constant[Test if this method has been overridden.] return[compare[call[name[getattr], parameter[name[instance], name[method]]].__func__ not_equal[!=] call[name[getattr], parameter[name[base], name[method]]].__func__]] if call[name[manipulator].will_copy, parameter[]] begin[:] if call[name[method_overwritten], parameter[name[manipulator], constant[transform_incoming]]] begin[:] call[name[self].__incoming_copying_manipulators.insert, parameter[constant[0], name[manipulator]]] if call[name[method_overwritten], parameter[name[manipulator], constant[transform_outgoing]]] begin[:] call[name[self].__outgoing_copying_manipulators.insert, parameter[constant[0], name[manipulator]]]
keyword[def] identifier[add_son_manipulator] ( identifier[self] , identifier[manipulator] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] , identifier[DeprecationWarning] , identifier[stacklevel] = literal[int] ) identifier[base] = identifier[SONManipulator] () keyword[def] identifier[method_overwritten] ( identifier[instance] , identifier[method] ): literal[string] keyword[return] ( identifier[getattr] ( identifier[instance] , identifier[method] ). identifier[__func__] != identifier[getattr] ( identifier[base] , identifier[method] ). identifier[__func__] ) keyword[if] identifier[manipulator] . identifier[will_copy] (): keyword[if] identifier[method_overwritten] ( identifier[manipulator] , literal[string] ): identifier[self] . identifier[__incoming_copying_manipulators] . identifier[insert] ( literal[int] , identifier[manipulator] ) keyword[if] identifier[method_overwritten] ( identifier[manipulator] , literal[string] ): identifier[self] . identifier[__outgoing_copying_manipulators] . identifier[insert] ( literal[int] , identifier[manipulator] ) keyword[else] : keyword[if] identifier[method_overwritten] ( identifier[manipulator] , literal[string] ): identifier[self] . identifier[__incoming_manipulators] . identifier[insert] ( literal[int] , identifier[manipulator] ) keyword[if] identifier[method_overwritten] ( identifier[manipulator] , literal[string] ): identifier[self] . identifier[__outgoing_manipulators] . identifier[insert] ( literal[int] , identifier[manipulator] )
def add_son_manipulator(self, manipulator): """Add a new son manipulator to this database. **DEPRECATED** - `add_son_manipulator` is deprecated. .. versionchanged:: 3.0 Deprecated add_son_manipulator. """ warnings.warn('add_son_manipulator is deprecated', DeprecationWarning, stacklevel=2) base = SONManipulator() def method_overwritten(instance, method): """Test if this method has been overridden.""" return getattr(instance, method).__func__ != getattr(base, method).__func__ if manipulator.will_copy(): if method_overwritten(manipulator, 'transform_incoming'): self.__incoming_copying_manipulators.insert(0, manipulator) # depends on [control=['if'], data=[]] if method_overwritten(manipulator, 'transform_outgoing'): self.__outgoing_copying_manipulators.insert(0, manipulator) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: if method_overwritten(manipulator, 'transform_incoming'): self.__incoming_manipulators.insert(0, manipulator) # depends on [control=['if'], data=[]] if method_overwritten(manipulator, 'transform_outgoing'): self.__outgoing_manipulators.insert(0, manipulator) # depends on [control=['if'], data=[]]
def _help_preprocess(self, help, cmdname): """Hook to preprocess a help string before writing to stdout. "help" is the help string to process. "cmdname" is the canonical sub-command name for which help is being given, or None if the help is not specific to a command. By default the following template variables are interpolated in help content. (Note: these are similar to Python 2.4's string.Template interpolation but not quite.) ${name} The tool's/shell's name, i.e. 'self.name'. ${option_list} A formatted table of options for this shell/tool. ${command_list} A formatted table of available sub-commands. ${help_list} A formatted table of additional help topics (i.e. 'help_*' methods with no matching 'do_*' method). ${cmd_name} The name (and aliases) for this sub-command formatted as: "NAME (ALIAS1, ALIAS2, ...)". ${cmd_usage} A formatted usage block inferred from the command function signature. ${cmd_option_list} A formatted table of options for this sub-command. (This is only available for commands using the optparse integration, i.e. using @cmdln.option decorators or manually setting the 'optparser' attribute on the 'do_*' method.) Returns the processed help. """ preprocessors = { "${name}": self._help_preprocess_name, "${option_list}": self._help_preprocess_option_list, "${command_list}": self._help_preprocess_command_list, "${help_list}": self._help_preprocess_help_list, "${cmd_name}": self._help_preprocess_cmd_name, "${cmd_usage}": self._help_preprocess_cmd_usage, "${cmd_option_list}": self._help_preprocess_cmd_option_list, } for marker, preprocessor in preprocessors.items(): if marker in help: help = preprocessor(help, cmdname) return help
def function[_help_preprocess, parameter[self, help, cmdname]]: constant[Hook to preprocess a help string before writing to stdout. "help" is the help string to process. "cmdname" is the canonical sub-command name for which help is being given, or None if the help is not specific to a command. By default the following template variables are interpolated in help content. (Note: these are similar to Python 2.4's string.Template interpolation but not quite.) ${name} The tool's/shell's name, i.e. 'self.name'. ${option_list} A formatted table of options for this shell/tool. ${command_list} A formatted table of available sub-commands. ${help_list} A formatted table of additional help topics (i.e. 'help_*' methods with no matching 'do_*' method). ${cmd_name} The name (and aliases) for this sub-command formatted as: "NAME (ALIAS1, ALIAS2, ...)". ${cmd_usage} A formatted usage block inferred from the command function signature. ${cmd_option_list} A formatted table of options for this sub-command. (This is only available for commands using the optparse integration, i.e. using @cmdln.option decorators or manually setting the 'optparser' attribute on the 'do_*' method.) Returns the processed help. 
] variable[preprocessors] assign[=] dictionary[[<ast.Constant object at 0x7da1b05dba60>, <ast.Constant object at 0x7da1b05d86d0>, <ast.Constant object at 0x7da1b05db130>, <ast.Constant object at 0x7da1b05db040>, <ast.Constant object at 0x7da1b05d8850>, <ast.Constant object at 0x7da1b05db550>, <ast.Constant object at 0x7da1b05db160>], [<ast.Attribute object at 0x7da1b05dba30>, <ast.Attribute object at 0x7da1b05d92d0>, <ast.Attribute object at 0x7da1b05d8910>, <ast.Attribute object at 0x7da1b05d8af0>, <ast.Attribute object at 0x7da1b05db940>, <ast.Attribute object at 0x7da1b05d8640>, <ast.Attribute object at 0x7da1b05db700>]] for taget[tuple[[<ast.Name object at 0x7da1b05db0d0>, <ast.Name object at 0x7da1b05d8df0>]]] in starred[call[name[preprocessors].items, parameter[]]] begin[:] if compare[name[marker] in name[help]] begin[:] variable[help] assign[=] call[name[preprocessor], parameter[name[help], name[cmdname]]] return[name[help]]
keyword[def] identifier[_help_preprocess] ( identifier[self] , identifier[help] , identifier[cmdname] ): literal[string] identifier[preprocessors] ={ literal[string] : identifier[self] . identifier[_help_preprocess_name] , literal[string] : identifier[self] . identifier[_help_preprocess_option_list] , literal[string] : identifier[self] . identifier[_help_preprocess_command_list] , literal[string] : identifier[self] . identifier[_help_preprocess_help_list] , literal[string] : identifier[self] . identifier[_help_preprocess_cmd_name] , literal[string] : identifier[self] . identifier[_help_preprocess_cmd_usage] , literal[string] : identifier[self] . identifier[_help_preprocess_cmd_option_list] , } keyword[for] identifier[marker] , identifier[preprocessor] keyword[in] identifier[preprocessors] . identifier[items] (): keyword[if] identifier[marker] keyword[in] identifier[help] : identifier[help] = identifier[preprocessor] ( identifier[help] , identifier[cmdname] ) keyword[return] identifier[help]
def _help_preprocess(self, help, cmdname): """Hook to preprocess a help string before writing to stdout. "help" is the help string to process. "cmdname" is the canonical sub-command name for which help is being given, or None if the help is not specific to a command. By default the following template variables are interpolated in help content. (Note: these are similar to Python 2.4's string.Template interpolation but not quite.) ${name} The tool's/shell's name, i.e. 'self.name'. ${option_list} A formatted table of options for this shell/tool. ${command_list} A formatted table of available sub-commands. ${help_list} A formatted table of additional help topics (i.e. 'help_*' methods with no matching 'do_*' method). ${cmd_name} The name (and aliases) for this sub-command formatted as: "NAME (ALIAS1, ALIAS2, ...)". ${cmd_usage} A formatted usage block inferred from the command function signature. ${cmd_option_list} A formatted table of options for this sub-command. (This is only available for commands using the optparse integration, i.e. using @cmdln.option decorators or manually setting the 'optparser' attribute on the 'do_*' method.) Returns the processed help. """ preprocessors = {'${name}': self._help_preprocess_name, '${option_list}': self._help_preprocess_option_list, '${command_list}': self._help_preprocess_command_list, '${help_list}': self._help_preprocess_help_list, '${cmd_name}': self._help_preprocess_cmd_name, '${cmd_usage}': self._help_preprocess_cmd_usage, '${cmd_option_list}': self._help_preprocess_cmd_option_list} for (marker, preprocessor) in preprocessors.items(): if marker in help: help = preprocessor(help, cmdname) # depends on [control=['if'], data=['help']] # depends on [control=['for'], data=[]] return help
def icohpvalue(self, spin=Spin.up):
    """
    Args:
        spin: Spin.up or Spin.down

    Returns:
        icohpvalue (float) corresponding to chosen spin
    """
    # Spin.down values only exist for spin-polarized calculations.
    if spin == Spin.down and not self.is_spin_polarized:
        raise ValueError("The calculation was not performed with spin polarization")
    return self._icohp[spin]
def function[icohpvalue, parameter[self, spin]]: constant[ Args: spin: Spin.up or Spin.down Returns: icohpvalue (float) corresponding to chosen spin ] if <ast.BoolOp object at 0x7da18f810c70> begin[:] <ast.Raise object at 0x7da18f813d60> return[call[name[self]._icohp][name[spin]]]
keyword[def] identifier[icohpvalue] ( identifier[self] , identifier[spin] = identifier[Spin] . identifier[up] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[is_spin_polarized] keyword[and] identifier[spin] == identifier[Spin] . identifier[down] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[self] . identifier[_icohp] [ identifier[spin] ]
def icohpvalue(self, spin=Spin.up): """ Args: spin: Spin.up or Spin.down Returns: icohpvalue (float) corresponding to chosen spin """ if not self.is_spin_polarized and spin == Spin.down: raise ValueError('The calculation was not performed with spin polarization') # depends on [control=['if'], data=[]] return self._icohp[spin]
def _max_form(f, colname): """ Assumes dataframe with hierarchical columns with first index equal to the use and second index equal to the attribute. e.g. f.columns equal to:: mixedoffice building_cost building_revenue building_size max_profit max_profit_far total_cost industrial building_cost building_revenue building_size max_profit max_profit_far total_cost """ df = f.stack(level=0)[[colname]].stack().unstack(level=1).reset_index(level=1, drop=True) return df.idxmax(axis=1)
def function[_max_form, parameter[f, colname]]: constant[ Assumes dataframe with hierarchical columns with first index equal to the use and second index equal to the attribute. e.g. f.columns equal to:: mixedoffice building_cost building_revenue building_size max_profit max_profit_far total_cost industrial building_cost building_revenue building_size max_profit max_profit_far total_cost ] variable[df] assign[=] call[call[call[call[call[name[f].stack, parameter[]]][list[[<ast.Name object at 0x7da18f09d6c0>]]].stack, parameter[]].unstack, parameter[]].reset_index, parameter[]] return[call[name[df].idxmax, parameter[]]]
keyword[def] identifier[_max_form] ( identifier[f] , identifier[colname] ): literal[string] identifier[df] = identifier[f] . identifier[stack] ( identifier[level] = literal[int] )[[ identifier[colname] ]]. identifier[stack] (). identifier[unstack] ( identifier[level] = literal[int] ). identifier[reset_index] ( identifier[level] = literal[int] , identifier[drop] = keyword[True] ) keyword[return] identifier[df] . identifier[idxmax] ( identifier[axis] = literal[int] )
def _max_form(f, colname): """ Assumes dataframe with hierarchical columns with first index equal to the use and second index equal to the attribute. e.g. f.columns equal to:: mixedoffice building_cost building_revenue building_size max_profit max_profit_far total_cost industrial building_cost building_revenue building_size max_profit max_profit_far total_cost """ df = f.stack(level=0)[[colname]].stack().unstack(level=1).reset_index(level=1, drop=True) return df.idxmax(axis=1)
def jwt_optional(fn):
    """
    If you decorate a view with this, it will check the request for a valid
    JWT and put it into the Flask application context before calling the view.

    If no authorization header is present, the view will be called without the
    application context being changed. Other authentication errors are not
    affected. For example, if an expired JWT is passed in, it will still not
    be able to access an endpoint protected by this decorator.

    :param fn: The view function to decorate
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        try:
            # Stash the decoded token on the app context for the view.
            ctx_stack.top.jwt = _decode_jwt_from_headers()
        except (NoAuthorizationError, InvalidHeaderError):
            # Missing/malformed header: proceed without touching the context.
            pass
        return fn(*args, **kwargs)
    return wrapper
def function[jwt_optional, parameter[fn]]: constant[ If you decorate a view with this, it will check the request for a valid JWT and put it into the Flask application context before calling the view. If no authorization header is present, the view will be called without the application context being changed. Other authentication errors are not affected. For example, if an expired JWT is passed in, it will still not be able to access an endpoint protected by this decorator. :param fn: The view function to decorate ] def function[wrapper, parameter[]]: <ast.Try object at 0x7da1b1037f40> return[call[name[fn], parameter[<ast.Starred object at 0x7da1b10351e0>]]] return[name[wrapper]]
keyword[def] identifier[jwt_optional] ( identifier[fn] ): literal[string] @ identifier[wraps] ( identifier[fn] ) keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): keyword[try] : identifier[jwt_data] = identifier[_decode_jwt_from_headers] () identifier[ctx_stack] . identifier[top] . identifier[jwt] = identifier[jwt_data] keyword[except] ( identifier[NoAuthorizationError] , identifier[InvalidHeaderError] ): keyword[pass] keyword[return] identifier[fn] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[wrapper]
def jwt_optional(fn): """ If you decorate a view with this, it will check the request for a valid JWT and put it into the Flask application context before calling the view. If no authorization header is present, the view will be called without the application context being changed. Other authentication errors are not affected. For example, if an expired JWT is passed in, it will still not be able to access an endpoint protected by this decorator. :param fn: The view function to decorate """ @wraps(fn) def wrapper(*args, **kwargs): try: jwt_data = _decode_jwt_from_headers() ctx_stack.top.jwt = jwt_data # depends on [control=['try'], data=[]] except (NoAuthorizationError, InvalidHeaderError): pass # depends on [control=['except'], data=[]] return fn(*args, **kwargs) return wrapper
def crop(self, height, width, center_i=None, center_j=None):
    """Crop the image centered around center_i, center_j.

    Parameters
    ----------
    height : int
        The height of the desired image.

    width : int
        The width of the desired image.

    center_i : int
        The center height point at which to crop. If not specified, the
        center of the image is used.

    center_j : int
        The center width point at which to crop. If not specified, the
        center of the image is used.

    Returns
    -------
    :obj:`Image`
        A cropped Image of the same type.
    """
    # Crop both channels identically and reassemble the RGB-D pair.
    cropped = [channel.crop(height, width, center_i, center_j)
               for channel in (self.color, self.depth)]
    return RgbdImage.from_color_and_depth(*cropped)
def function[crop, parameter[self, height, width, center_i, center_j]]: constant[Crop the image centered around center_i, center_j. Parameters ---------- height : int The height of the desired image. width : int The width of the desired image. center_i : int The center height point at which to crop. If not specified, the center of the image is used. center_j : int The center width point at which to crop. If not specified, the center of the image is used. Returns ------- :obj:`Image` A cropped Image of the same type. ] variable[color_im_crop] assign[=] call[name[self].color.crop, parameter[name[height], name[width], name[center_i], name[center_j]]] variable[depth_im_crop] assign[=] call[name[self].depth.crop, parameter[name[height], name[width], name[center_i], name[center_j]]] return[call[name[RgbdImage].from_color_and_depth, parameter[name[color_im_crop], name[depth_im_crop]]]]
keyword[def] identifier[crop] ( identifier[self] , identifier[height] , identifier[width] , identifier[center_i] = keyword[None] , identifier[center_j] = keyword[None] ): literal[string] identifier[color_im_crop] = identifier[self] . identifier[color] . identifier[crop] ( identifier[height] , identifier[width] , identifier[center_i] , identifier[center_j] ) identifier[depth_im_crop] = identifier[self] . identifier[depth] . identifier[crop] ( identifier[height] , identifier[width] , identifier[center_i] , identifier[center_j] ) keyword[return] identifier[RgbdImage] . identifier[from_color_and_depth] ( identifier[color_im_crop] , identifier[depth_im_crop] )
def crop(self, height, width, center_i=None, center_j=None): """Crop the image centered around center_i, center_j. Parameters ---------- height : int The height of the desired image. width : int The width of the desired image. center_i : int The center height point at which to crop. If not specified, the center of the image is used. center_j : int The center width point at which to crop. If not specified, the center of the image is used. Returns ------- :obj:`Image` A cropped Image of the same type. """ color_im_crop = self.color.crop(height, width, center_i, center_j) depth_im_crop = self.depth.crop(height, width, center_i, center_j) return RgbdImage.from_color_and_depth(color_im_crop, depth_im_crop)
def get_time_delta(time_string: str) -> timedelta:
    """
    Takes a time string (1 hours, 10 days, etc.) and returns a python
    timedelta object.

    :param time_string: the time value to convert to a timedelta
    :type time_string: str
    :returns: datetime.timedelta for relative time
    :raises Exception: if no time component can be parsed from the string
    """
    rel_time = re.compile(
        r"((?P<hours>\d+?)\s+hour)?((?P<minutes>\d+?)\s+minute)?"
        r"((?P<seconds>\d+?)\s+second)?((?P<days>\d+?)\s+day)?",
        flags=re.IGNORECASE,
    )
    match = rel_time.match(time_string)
    if not match:
        raise Exception(f"Invalid relative time: {time_string}")
    # Every group in the pattern is optional, so the pattern matches even
    # garbage input (with all groups None) -- the real validity check is
    # the all-None test below, not the `if not match` guard above.
    parts = match.groupdict()
    if all(value is None for value in parts.values()):
        raise Exception(f"Invalid relative time: {time_string}")
    # NOTE(review): only components that appear in the pattern's fixed
    # hours/minutes/seconds/days order (with no extra leading text) are
    # captured; e.g. "1 minute 30 seconds" yields only minutes=1.
    time_params = {
        unit: int(magnitude) for unit, magnitude in parts.items() if magnitude
    }
    return timedelta(**time_params)
def function[get_time_delta, parameter[time_string]]: constant[ Takes a time string (1 hours, 10 days, etc.) and returns a python timedelta object :param time_string: the time value to convert to a timedelta :type time_string: str :returns: datetime.timedelta for relative time :type datetime.timedelta ] <ast.AnnAssign object at 0x7da20e955f90> <ast.AnnAssign object at 0x7da20e955f30> if <ast.UnaryOp object at 0x7da20e954640> begin[:] <ast.Raise object at 0x7da20e956380> <ast.AnnAssign object at 0x7da2044c29e0> variable[time_params] assign[=] dictionary[[], []] if call[name[all], parameter[<ast.GeneratorExp object at 0x7da2044c0be0>]] begin[:] <ast.Raise object at 0x7da2044c3d60> for taget[tuple[[<ast.Name object at 0x7da20c991f30>, <ast.Name object at 0x7da20c992620>]]] in starred[call[name[parts].items, parameter[]]] begin[:] if name[magnitude] begin[:] <ast.AnnAssign object at 0x7da20c991c30> return[call[name[timedelta], parameter[]]]
keyword[def] identifier[get_time_delta] ( identifier[time_string] : identifier[str] )-> identifier[timedelta] : literal[string] identifier[rel_time] : identifier[Pattern] = identifier[re] . identifier[compile] ( identifier[pattern] = literal[string] , identifier[flags] = identifier[re] . identifier[IGNORECASE] , ) identifier[parts] : identifier[Optional] [ identifier[Match] [ identifier[AnyStr] ]]= identifier[rel_time] . identifier[match] ( identifier[string] = identifier[time_string] ) keyword[if] keyword[not] identifier[parts] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[parts] : identifier[Dict] [ identifier[str] , identifier[str] ]= identifier[parts] . identifier[groupdict] () identifier[time_params] ={} keyword[if] identifier[all] ( identifier[value] == keyword[None] keyword[for] identifier[value] keyword[in] identifier[parts] . identifier[values] ()): keyword[raise] identifier[Exception] ( literal[string] ) keyword[for] identifier[time_unit] , identifier[magnitude] keyword[in] identifier[parts] . identifier[items] (): keyword[if] identifier[magnitude] : identifier[time_params] [ identifier[time_unit] ]: identifier[int] = identifier[int] ( identifier[magnitude] ) keyword[return] identifier[timedelta] (** identifier[time_params] )
def get_time_delta(time_string: str) -> timedelta: """ Takes a time string (1 hours, 10 days, etc.) and returns a python timedelta object :param time_string: the time value to convert to a timedelta :type time_string: str :returns: datetime.timedelta for relative time :type datetime.timedelta """ # noqa rel_time: Pattern = re.compile(pattern='((?P<hours>\\d+?)\\s+hour)?((?P<minutes>\\d+?)\\s+minute)?((?P<seconds>\\d+?)\\s+second)?((?P<days>\\d+?)\\s+day)?', flags=re.IGNORECASE) parts: Optional[Match[AnyStr]] = rel_time.match(string=time_string) if not parts: raise Exception(f'Invalid relative time: {time_string}') # depends on [control=['if'], data=[]] # https://docs.python.org/3/library/re.html#re.Match.groupdict parts: Dict[str, str] = parts.groupdict() time_params = {} if all((value == None for value in parts.values())): raise Exception(f'Invalid relative time: {time_string}') # depends on [control=['if'], data=[]] for (time_unit, magnitude) in parts.items(): if magnitude: time_params[time_unit]: int = int(magnitude) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return timedelta(**time_params)
def fmt_account(account, title=None):
    """Format an Account or a DirectedAccount."""
    if title is None:
        # `Account` or `DirectedAccount`
        title = account.__class__.__name__
    link_count = len(account)
    plural = '' if link_count == 1 else 's'
    title = '{} ({} causal link{})'.format(title, link_count, plural)
    effects = '\n'.join(fmt_ac_ria(m) for m in account.irreducible_effects)
    causes = '\n'.join(fmt_ac_ria(m) for m in account.irreducible_causes)
    body = ('Irreducible effects\n' + effects +
            '\nIrreducible causes\n' + causes)
    return '\n' + header(title, body, under_char='*')
def function[fmt_account, parameter[account, title]]: constant[Format an Account or a DirectedAccount.] if compare[name[title] is constant[None]] begin[:] variable[title] assign[=] name[account].__class__.__name__ variable[title] assign[=] call[constant[{} ({} causal link{})].format, parameter[name[title], call[name[len], parameter[name[account]]], <ast.IfExp object at 0x7da18c4cf6d0>]] variable[body] assign[=] constant[] <ast.AugAssign object at 0x7da18c4cc1c0> <ast.AugAssign object at 0x7da18c4cd960> <ast.AugAssign object at 0x7da18c4cc670> <ast.AugAssign object at 0x7da18c4cf7f0> return[binary_operation[constant[ ] + call[name[header], parameter[name[title], name[body]]]]]
keyword[def] identifier[fmt_account] ( identifier[account] , identifier[title] = keyword[None] ): literal[string] keyword[if] identifier[title] keyword[is] keyword[None] : identifier[title] = identifier[account] . identifier[__class__] . identifier[__name__] identifier[title] = literal[string] . identifier[format] ( identifier[title] , identifier[len] ( identifier[account] ), literal[string] keyword[if] identifier[len] ( identifier[account] )== literal[int] keyword[else] literal[string] ) identifier[body] = literal[string] identifier[body] += literal[string] identifier[body] += literal[string] . identifier[join] ( identifier[fmt_ac_ria] ( identifier[m] ) keyword[for] identifier[m] keyword[in] identifier[account] . identifier[irreducible_effects] ) identifier[body] += literal[string] identifier[body] += literal[string] . identifier[join] ( identifier[fmt_ac_ria] ( identifier[m] ) keyword[for] identifier[m] keyword[in] identifier[account] . identifier[irreducible_causes] ) keyword[return] literal[string] + identifier[header] ( identifier[title] , identifier[body] , identifier[under_char] = literal[string] )
def fmt_account(account, title=None): """Format an Account or a DirectedAccount.""" if title is None: title = account.__class__.__name__ # `Account` or `DirectedAccount` # depends on [control=['if'], data=['title']] title = '{} ({} causal link{})'.format(title, len(account), '' if len(account) == 1 else 's') body = '' body += 'Irreducible effects\n' body += '\n'.join((fmt_ac_ria(m) for m in account.irreducible_effects)) body += '\nIrreducible causes\n' body += '\n'.join((fmt_ac_ria(m) for m in account.irreducible_causes)) return '\n' + header(title, body, under_char='*')
def gone_assignments(self): ''' Returns the list of past assignments the user did not submit for before the hard deadline. ''' # Include only assignments with past hard deadline qs = Assignment.objects.filter(hard_deadline__lt=timezone.now()) # Include only assignments from courses this user is registered for qs = qs.filter(course__in=self.user_courses()) # Include only assignments this user has no submission for return qs.order_by('-hard_deadline')
def function[gone_assignments, parameter[self]]: constant[ Returns the list of past assignments the user did not submit for before the hard deadline. ] variable[qs] assign[=] call[name[Assignment].objects.filter, parameter[]] variable[qs] assign[=] call[name[qs].filter, parameter[]] return[call[name[qs].order_by, parameter[constant[-hard_deadline]]]]
keyword[def] identifier[gone_assignments] ( identifier[self] ): literal[string] identifier[qs] = identifier[Assignment] . identifier[objects] . identifier[filter] ( identifier[hard_deadline__lt] = identifier[timezone] . identifier[now] ()) identifier[qs] = identifier[qs] . identifier[filter] ( identifier[course__in] = identifier[self] . identifier[user_courses] ()) keyword[return] identifier[qs] . identifier[order_by] ( literal[string] )
def gone_assignments(self): """ Returns the list of past assignments the user did not submit for before the hard deadline. """ # Include only assignments with past hard deadline qs = Assignment.objects.filter(hard_deadline__lt=timezone.now()) # Include only assignments from courses this user is registered for qs = qs.filter(course__in=self.user_courses()) # Include only assignments this user has no submission for return qs.order_by('-hard_deadline')
def rotateImage(image, angle):
    """
    Rotate a 2d array (list of rows) by a multiple of 90 degrees.

    :param image: 2d sequence of rows to rotate; it is copied, never mutated
    :param angle: number of clockwise quarter turns:
        0 = unchanged, 1 = 90 deg. cw, 2 = 180 deg., 3 = 90 deg. ccw.
        Any integer is accepted -- it is reduced modulo 4, so negative
        values rotate counter-clockwise.
    :return: a new list of lists (the original returned tuples for the
        inner rows whenever a rotation was applied; the result row type
        is now uniform)
    """
    # Copy every row into a fresh list so the caller's data is not aliased.
    image = [list(row) for row in image]
    for _ in range(angle % 4):
        # zip(*reversed(rows)) is one 90-degree clockwise rotation;
        # re-listify each row so all return paths yield lists, not tuples.
        image = [list(row) for row in zip(*image[::-1])]
    return image
def function[rotateImage, parameter[image, angle]]: constant[ rotates a 2d array to a multiple of 90 deg. 0 = default 1 = 90 deg. cw 2 = 180 deg. 3 = 90 deg. ccw ] variable[image] assign[=] <ast.ListComp object at 0x7da18dc9b670> for taget[name[n]] in starred[call[name[range], parameter[binary_operation[name[angle] <ast.Mod object at 0x7da2590d6920> constant[4]]]]] begin[:] variable[image] assign[=] call[name[list], parameter[call[name[zip], parameter[<ast.Starred object at 0x7da18dc999c0>]]]] return[name[image]]
keyword[def] identifier[rotateImage] ( identifier[image] , identifier[angle] ): literal[string] identifier[image] =[ identifier[list] ( identifier[row] ) keyword[for] identifier[row] keyword[in] identifier[image] ] keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[angle] % literal[int] ): identifier[image] = identifier[list] ( identifier[zip] (* identifier[image] [::- literal[int] ])) keyword[return] identifier[image]
def rotateImage(image, angle): """ rotates a 2d array to a multiple of 90 deg. 0 = default 1 = 90 deg. cw 2 = 180 deg. 3 = 90 deg. ccw """ image = [list(row) for row in image] for n in range(angle % 4): image = list(zip(*image[::-1])) # depends on [control=['for'], data=[]] return image
def add_namespace_uri(self, ns_uri, prefix=None, schema_location=None):
    """Adds a new namespace to this set, optionally with a prefix and
    schema location URI.

    If the namespace already exists, the given prefix and schema location
    are merged with the existing entry:

    * If non-None, ``prefix`` is added to the set.  The preferred prefix
      is not modified.
    * If a schema location is not already associated with the namespace,
      it is set to ``schema_location`` (if given).

    If the namespace doesn't already exist in this set (so a new one is
    being created) and a prefix is given, that prefix becomes preferred.
    If not given, a preference as a default namespace is used.

    Args:
        ns_uri (str): The URI of the new namespace
        prefix (str): The desired prefix for the new namespace (optional)
        schema_location (str): The desired schema location for the new
            namespace (optional).

    Raises:
        DuplicatePrefixError: If a prefix is given which already maps to a
            different namespace
        ConflictingSchemaLocationError: If a schema location is given and
            the namespace already exists in this set with a different
            schema location.
    """
    # NOTE(review): assert is stripped under ``python -O``; confirm callers
    # never pass an empty/None ns_uri, or raise ValueError explicitly.
    assert ns_uri
    if ns_uri in self.__ns_uri_map:
        # We have a _NamespaceInfo object for this URI already.  So this
        # is a merge operation.
        #
        # We modify a copy of the real _NamespaceInfo so that we are
        # exception-safe: if something goes wrong, we don't end up with a
        # half-changed NamespaceSet.
        ni = self.__lookup_uri(ns_uri)
        new_ni = copy.deepcopy(ni)

        # Reconcile prefixes.  The conflict check may raise; nothing has
        # been mutated yet at that point.
        if prefix:
            self.__check_prefix_conflict(ni, prefix)
            new_ni.prefixes.add(prefix)

        self.__merge_schema_locations(new_ni, schema_location)

        # At this point, we have a legit new_ni object.  Now we update
        # the set, ensuring our invariants.  This should replace
        # all instances of the old ni in this set.
        for p in new_ni.prefixes:
            self.__prefix_map[p] = new_ni
        self.__ns_uri_map[new_ni.uri] = new_ni
    else:
        # A brand new namespace.  The incoming prefix should not exist at
        # all in the prefix map.
        if prefix:
            self.__check_prefix_conflict(ns_uri, prefix)
        ni = _NamespaceInfo(ns_uri, prefix, schema_location)
        self.__add_namespaceinfo(ni)
def function[add_namespace_uri, parameter[self, ns_uri, prefix, schema_location]]: constant[Adds a new namespace to this set, optionally with a prefix and schema location URI. If the namespace already exists, the given prefix and schema location are merged with the existing entry: * If non-None, ``prefix`` is added to the set. The preferred prefix is not modified. * If a schema location is not already associated with the namespace, it is set to ``schema_location`` (if given). If the namespace doesn't already exist in this set (so a new one is being created) and a prefix is given, that prefix becomes preferred. If not given, a preference as a default namespace is used. Args: ns_uri (str): The URI of the new namespace prefix (str): The desired prefix for the new namespace (optional) schema_location (str): The desired schema location for the new namespace (optional). Raises: DuplicatePrefixError: If a prefix is given which already maps to a different namespace ConflictingSchemaLocationError: If a schema location is given and the namespace already exists in this set with a different schema location. ] assert[name[ns_uri]] if compare[name[ns_uri] in name[self].__ns_uri_map] begin[:] variable[ni] assign[=] call[name[self].__lookup_uri, parameter[name[ns_uri]]] variable[new_ni] assign[=] call[name[copy].deepcopy, parameter[name[ni]]] if name[prefix] begin[:] call[name[self].__check_prefix_conflict, parameter[name[ni], name[prefix]]] call[name[new_ni].prefixes.add, parameter[name[prefix]]] call[name[self].__merge_schema_locations, parameter[name[new_ni], name[schema_location]]] for taget[name[p]] in starred[name[new_ni].prefixes] begin[:] call[name[self].__prefix_map][name[p]] assign[=] name[new_ni] call[name[self].__ns_uri_map][name[new_ni].uri] assign[=] name[new_ni]
keyword[def] identifier[add_namespace_uri] ( identifier[self] , identifier[ns_uri] , identifier[prefix] = keyword[None] , identifier[schema_location] = keyword[None] ): literal[string] keyword[assert] identifier[ns_uri] keyword[if] identifier[ns_uri] keyword[in] identifier[self] . identifier[__ns_uri_map] : identifier[ni] = identifier[self] . identifier[__lookup_uri] ( identifier[ns_uri] ) identifier[new_ni] = identifier[copy] . identifier[deepcopy] ( identifier[ni] ) keyword[if] identifier[prefix] : identifier[self] . identifier[__check_prefix_conflict] ( identifier[ni] , identifier[prefix] ) identifier[new_ni] . identifier[prefixes] . identifier[add] ( identifier[prefix] ) identifier[self] . identifier[__merge_schema_locations] ( identifier[new_ni] , identifier[schema_location] ) keyword[for] identifier[p] keyword[in] identifier[new_ni] . identifier[prefixes] : identifier[self] . identifier[__prefix_map] [ identifier[p] ]= identifier[new_ni] identifier[self] . identifier[__ns_uri_map] [ identifier[new_ni] . identifier[uri] ]= identifier[new_ni] keyword[else] : keyword[if] identifier[prefix] : identifier[self] . identifier[__check_prefix_conflict] ( identifier[ns_uri] , identifier[prefix] ) identifier[ni] = identifier[_NamespaceInfo] ( identifier[ns_uri] , identifier[prefix] , identifier[schema_location] ) identifier[self] . identifier[__add_namespaceinfo] ( identifier[ni] )
def add_namespace_uri(self, ns_uri, prefix=None, schema_location=None): """Adds a new namespace to this set, optionally with a prefix and schema location URI. If the namespace already exists, the given prefix and schema location are merged with the existing entry: * If non-None, ``prefix`` is added to the set. The preferred prefix is not modified. * If a schema location is not already associated with the namespace, it is set to ``schema_location`` (if given). If the namespace doesn't already exist in this set (so a new one is being created) and a prefix is given, that prefix becomes preferred. If not given, a preference as a default namespace is used. Args: ns_uri (str): The URI of the new namespace prefix (str): The desired prefix for the new namespace (optional) schema_location (str): The desired schema location for the new namespace (optional). Raises: DuplicatePrefixError: If a prefix is given which already maps to a different namespace ConflictingSchemaLocationError: If a schema location is given and the namespace already exists in this set with a different schema location. """ assert ns_uri if ns_uri in self.__ns_uri_map: # We have a _NamespaceInfo object for this URI already. So this # is a merge operation. # # We modify a copy of the real _NamespaceInfo so that we are # exception-safe: if something goes wrong, we don't end up with a # half-changed NamespaceSet. ni = self.__lookup_uri(ns_uri) new_ni = copy.deepcopy(ni) # Reconcile prefixes if prefix: self.__check_prefix_conflict(ni, prefix) new_ni.prefixes.add(prefix) # depends on [control=['if'], data=[]] self.__merge_schema_locations(new_ni, schema_location) # At this point, we have a legit new_ni object. Now we update # the set, ensuring our invariants. This should replace # all instances of the old ni in this set. 
for p in new_ni.prefixes: self.__prefix_map[p] = new_ni # depends on [control=['for'], data=['p']] self.__ns_uri_map[new_ni.uri] = new_ni # depends on [control=['if'], data=['ns_uri']] else: # A brand new namespace. The incoming prefix should not exist at # all in the prefix map. if prefix: self.__check_prefix_conflict(ns_uri, prefix) # depends on [control=['if'], data=[]] ni = _NamespaceInfo(ns_uri, prefix, schema_location) self.__add_namespaceinfo(ni)
def match_alphabet(self, pattern):
    """Initialise the alphabet for the Bitap algorithm.

    Maps each character of the pattern to a bitmask that has a 1 bit at
    every position where the character occurs; the leftmost pattern
    position corresponds to the most significant bit.

    Args:
      pattern: The text to encode.

    Returns:
      Hash of character locations.
    """
    s = {}
    last = len(pattern) - 1
    # enumerate() replaces the Python-2-only xrange() and the original
    # zero-then-OR double pass; the resulting mapping is identical.
    for i, char in enumerate(pattern):
        s[char] = s.get(char, 0) | (1 << (last - i))
    return s
def function[match_alphabet, parameter[self, pattern]]: constant[Initialise the alphabet for the Bitap algorithm. Args: pattern: The text to encode. Returns: Hash of character locations. ] variable[s] assign[=] dictionary[[], []] for taget[name[char]] in starred[name[pattern]] begin[:] call[name[s]][name[char]] assign[=] constant[0] for taget[name[i]] in starred[call[name[xrange], parameter[call[name[len], parameter[name[pattern]]]]]] begin[:] <ast.AugAssign object at 0x7da1b0840ac0> return[name[s]]
keyword[def] identifier[match_alphabet] ( identifier[self] , identifier[pattern] ): literal[string] identifier[s] ={} keyword[for] identifier[char] keyword[in] identifier[pattern] : identifier[s] [ identifier[char] ]= literal[int] keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[len] ( identifier[pattern] )): identifier[s] [ identifier[pattern] [ identifier[i] ]]|= literal[int] <<( identifier[len] ( identifier[pattern] )- identifier[i] - literal[int] ) keyword[return] identifier[s]
def match_alphabet(self, pattern): """Initialise the alphabet for the Bitap algorithm. Args: pattern: The text to encode. Returns: Hash of character locations. """ s = {} for char in pattern: s[char] = 0 # depends on [control=['for'], data=['char']] for i in xrange(len(pattern)): s[pattern[i]] |= 1 << len(pattern) - i - 1 # depends on [control=['for'], data=['i']] return s
def get_content_metadata(self, enterprise_customer):
    """
    Return all content metadata contained in the catalogs associated with the EnterpriseCustomer.

    Arguments:
        enterprise_customer (EnterpriseCustomer): The EnterpriseCustomer to return content metadata for.

    Returns:
        list: List of dicts containing content metadata.
    """
    # Keyed by content id so a later catalog entry overwrites an earlier
    # one for the same id, while insertion order is preserved.
    metadata_by_id = OrderedDict()

    # TODO: This branch can be removed when we get rid of discovery
    # service-based catalogs.
    if enterprise_customer.catalog:
        course_response = self._load_data(
            self.ENTERPRISE_CUSTOMER_ENDPOINT,
            detail_resource='courses',
            resource_id=str(enterprise_customer.uuid),
            traverse_pagination=True,
        )
        for course in course_response['results']:
            for run in course['course_runs']:
                # Mirror the shape of a search endpoint result.
                run['content_type'] = 'courserun'
                metadata_by_id[run['key']] = run

    for catalog in enterprise_customer.enterprise_customer_catalogs.all():
        catalog_response = self._load_data(
            self.ENTERPRISE_CUSTOMER_CATALOGS_ENDPOINT,
            resource_id=str(catalog.uuid),
            traverse_pagination=True,
            querystring={'page_size': 1000},
        )
        for item in catalog_response['results']:
            metadata_by_id[utils.get_content_metadata_item_id(item)] = item

    return metadata_by_id.values()
def function[get_content_metadata, parameter[self, enterprise_customer]]: constant[ Return all content metadata contained in the catalogs associated with the EnterpriseCustomer. Arguments: enterprise_customer (EnterpriseCustomer): The EnterpriseCustomer to return content metadata for. Returns: list: List of dicts containing content metadata. ] variable[content_metadata] assign[=] call[name[OrderedDict], parameter[]] if name[enterprise_customer].catalog begin[:] variable[response] assign[=] call[name[self]._load_data, parameter[name[self].ENTERPRISE_CUSTOMER_ENDPOINT]] for taget[name[course]] in starred[call[name[response]][constant[results]]] begin[:] for taget[name[course_run]] in starred[call[name[course]][constant[course_runs]]] begin[:] call[name[course_run]][constant[content_type]] assign[=] constant[courserun] call[name[content_metadata]][call[name[course_run]][constant[key]]] assign[=] name[course_run] for taget[name[enterprise_customer_catalog]] in starred[call[name[enterprise_customer].enterprise_customer_catalogs.all, parameter[]]] begin[:] variable[response] assign[=] call[name[self]._load_data, parameter[name[self].ENTERPRISE_CUSTOMER_CATALOGS_ENDPOINT]] for taget[name[item]] in starred[call[name[response]][constant[results]]] begin[:] variable[content_id] assign[=] call[name[utils].get_content_metadata_item_id, parameter[name[item]]] call[name[content_metadata]][name[content_id]] assign[=] name[item] return[call[name[content_metadata].values, parameter[]]]
keyword[def] identifier[get_content_metadata] ( identifier[self] , identifier[enterprise_customer] ): literal[string] identifier[content_metadata] = identifier[OrderedDict] () keyword[if] identifier[enterprise_customer] . identifier[catalog] : identifier[response] = identifier[self] . identifier[_load_data] ( identifier[self] . identifier[ENTERPRISE_CUSTOMER_ENDPOINT] , identifier[detail_resource] = literal[string] , identifier[resource_id] = identifier[str] ( identifier[enterprise_customer] . identifier[uuid] ), identifier[traverse_pagination] = keyword[True] , ) keyword[for] identifier[course] keyword[in] identifier[response] [ literal[string] ]: keyword[for] identifier[course_run] keyword[in] identifier[course] [ literal[string] ]: identifier[course_run] [ literal[string] ]= literal[string] identifier[content_metadata] [ identifier[course_run] [ literal[string] ]]= identifier[course_run] keyword[for] identifier[enterprise_customer_catalog] keyword[in] identifier[enterprise_customer] . identifier[enterprise_customer_catalogs] . identifier[all] (): identifier[response] = identifier[self] . identifier[_load_data] ( identifier[self] . identifier[ENTERPRISE_CUSTOMER_CATALOGS_ENDPOINT] , identifier[resource_id] = identifier[str] ( identifier[enterprise_customer_catalog] . identifier[uuid] ), identifier[traverse_pagination] = keyword[True] , identifier[querystring] ={ literal[string] : literal[int] }, ) keyword[for] identifier[item] keyword[in] identifier[response] [ literal[string] ]: identifier[content_id] = identifier[utils] . identifier[get_content_metadata_item_id] ( identifier[item] ) identifier[content_metadata] [ identifier[content_id] ]= identifier[item] keyword[return] identifier[content_metadata] . identifier[values] ()
def get_content_metadata(self, enterprise_customer): """ Return all content metadata contained in the catalogs associated with the EnterpriseCustomer. Arguments: enterprise_customer (EnterpriseCustomer): The EnterpriseCustomer to return content metadata for. Returns: list: List of dicts containing content metadata. """ content_metadata = OrderedDict() # TODO: This if block can be removed when we get rid of discovery service-based catalogs. if enterprise_customer.catalog: response = self._load_data(self.ENTERPRISE_CUSTOMER_ENDPOINT, detail_resource='courses', resource_id=str(enterprise_customer.uuid), traverse_pagination=True) for course in response['results']: for course_run in course['course_runs']: course_run['content_type'] = 'courserun' # Make this look like a search endpoint result. content_metadata[course_run['key']] = course_run # depends on [control=['for'], data=['course_run']] # depends on [control=['for'], data=['course']] # depends on [control=['if'], data=[]] for enterprise_customer_catalog in enterprise_customer.enterprise_customer_catalogs.all(): response = self._load_data(self.ENTERPRISE_CUSTOMER_CATALOGS_ENDPOINT, resource_id=str(enterprise_customer_catalog.uuid), traverse_pagination=True, querystring={'page_size': 1000}) for item in response['results']: content_id = utils.get_content_metadata_item_id(item) content_metadata[content_id] = item # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['enterprise_customer_catalog']] return content_metadata.values()
def add_suffix(string, suffix):
    """
    Adds a suffix to a string, if the string does not already have that suffix.

    :param string: the string that should have a suffix added to it
    :param suffix: the suffix to be added to the string
    :return: the string with the suffix added, if it does not already end in
        the suffix.  Otherwise, it returns the original string.
    """
    # str.endswith is clearer than the original ``string[-len(suffix):]``
    # slice, which silently degenerates to the whole string when the
    # suffix is empty; the observable result is identical in all cases.
    if string.endswith(suffix):
        return string
    return string + suffix
def function[add_suffix, parameter[string, suffix]]: constant[ Adds a suffix to a string, if the string does not already have that suffix. :param string: the string that should have a suffix added to it :param suffix: the suffix to be added to the string :return: the string with the suffix added, if it does not already end in the suffix. Otherwise, it returns the original string. ] if compare[call[name[string]][<ast.Slice object at 0x7da1b09e9f00>] not_equal[!=] name[suffix]] begin[:] return[binary_operation[name[string] + name[suffix]]]
keyword[def] identifier[add_suffix] ( identifier[string] , identifier[suffix] ): literal[string] keyword[if] identifier[string] [- identifier[len] ( identifier[suffix] ):]!= identifier[suffix] : keyword[return] identifier[string] + identifier[suffix] keyword[else] : keyword[return] identifier[string]
def add_suffix(string, suffix): """ Adds a suffix to a string, if the string does not already have that suffix. :param string: the string that should have a suffix added to it :param suffix: the suffix to be added to the string :return: the string with the suffix added, if it does not already end in the suffix. Otherwise, it returns the original string. """ if string[-len(suffix):] != suffix: return string + suffix # depends on [control=['if'], data=['suffix']] else: return string
def content(self):
    """
    :returns: The text body of the message.
    """
    # The code that follows is obviously pretty disgusting.
    # It seems like it might be impossible to completely replicate
    # the text of the original message if it has trailing whitespace
    message = self._content_xpb.one_(self._message_element)
    first_line = message.text
    if message.text[:2] == ' ':
        # Strip the two-character leading pad the site inserts.
        first_line = message.text[2:]
    else:
        log.debug("message did not have expected leading whitespace")
    # Serialize every child element, turning <br> back into newlines.
    subsequent_lines = ''.join([
        html.tostring(child, encoding='unicode').replace('<br>', '\n')
        for child in message.iterchildren()
    ])
    message_text = first_line + subsequent_lines
    if len(message_text) > 0 and message_text[-1] == ' ':
        message_text = message_text[:-1]
    else:
        # Fixed copy-pasted log text: this branch is about *trailing*
        # whitespace (the [-1] check above), not leading whitespace.
        log.debug("message did not have expected trailing whitespace")
    return message_text
def function[content, parameter[self]]: constant[ :returns: The text body of the message. ] variable[message] assign[=] call[name[self]._content_xpb.one_, parameter[name[self]._message_element]] variable[first_line] assign[=] name[message].text if compare[call[name[message].text][<ast.Slice object at 0x7da1b2846860>] equal[==] constant[ ]] begin[:] variable[first_line] assign[=] call[name[message].text][<ast.Slice object at 0x7da1b2844760>] variable[subsequent_lines] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da1b26bf640>]] variable[message_text] assign[=] binary_operation[name[first_line] + name[subsequent_lines]] if <ast.BoolOp object at 0x7da1b26bf1f0> begin[:] variable[message_text] assign[=] call[name[message_text]][<ast.Slice object at 0x7da1b26beef0>] return[name[message_text]]
keyword[def] identifier[content] ( identifier[self] ): literal[string] identifier[message] = identifier[self] . identifier[_content_xpb] . identifier[one_] ( identifier[self] . identifier[_message_element] ) identifier[first_line] = identifier[message] . identifier[text] keyword[if] identifier[message] . identifier[text] [: literal[int] ]== literal[string] : identifier[first_line] = identifier[message] . identifier[text] [ literal[int] :] keyword[else] : identifier[log] . identifier[debug] ( literal[string] ) identifier[subsequent_lines] = literal[string] . identifier[join] ([ identifier[html] . identifier[tostring] ( identifier[child] , identifier[encoding] = literal[string] ). identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[child] keyword[in] identifier[message] . identifier[iterchildren] () ]) identifier[message_text] = identifier[first_line] + identifier[subsequent_lines] keyword[if] identifier[len] ( identifier[message_text] )> literal[int] keyword[and] identifier[message_text] [- literal[int] ]== literal[string] : identifier[message_text] = identifier[message_text] [:- literal[int] ] keyword[else] : identifier[log] . identifier[debug] ( literal[string] ) keyword[return] identifier[message_text]
def content(self): """ :returns: The text body of the message. """ # The code that follows is obviously pretty disgusting. # It seems like it might be impossible to completely replicate # the text of the original message if it has trailing whitespace message = self._content_xpb.one_(self._message_element) first_line = message.text if message.text[:2] == ' ': first_line = message.text[2:] # depends on [control=['if'], data=[]] else: log.debug('message did not have expected leading whitespace') subsequent_lines = ''.join([html.tostring(child, encoding='unicode').replace('<br>', '\n') for child in message.iterchildren()]) message_text = first_line + subsequent_lines if len(message_text) > 0 and message_text[-1] == ' ': message_text = message_text[:-1] # depends on [control=['if'], data=[]] else: log.debug('message did not have expected leading whitespace') return message_text
def stroke_antialias(self, flag=True):
    """Enable or disable antialiasing of stroked outlines.

    :param flag: True or False. (default is True)
    :type flag: bool
    """
    self.drawer.append(pgmagick.DrawableStrokeAntialias(flag))
def function[stroke_antialias, parameter[self, flag]]: constant[stroke antialias :param flag: True or False. (default is True) :type flag: bool ] variable[antialias] assign[=] call[name[pgmagick].DrawableStrokeAntialias, parameter[name[flag]]] call[name[self].drawer.append, parameter[name[antialias]]]
keyword[def] identifier[stroke_antialias] ( identifier[self] , identifier[flag] = keyword[True] ): literal[string] identifier[antialias] = identifier[pgmagick] . identifier[DrawableStrokeAntialias] ( identifier[flag] ) identifier[self] . identifier[drawer] . identifier[append] ( identifier[antialias] )
def stroke_antialias(self, flag=True): """stroke antialias :param flag: True or False. (default is True) :type flag: bool """ antialias = pgmagick.DrawableStrokeAntialias(flag) self.drawer.append(antialias)
def GetMountpoints(data=None):
    """List all the filesystems mounted on the system.

    Results are memoized in the module-level MOUNTPOINT_CACHE for 60
    seconds.

    Args:
      data: Optional raw mount-table text (the concatenated contents of
        /proc/mounts and /etc/mtab).  When None, those files are read.

    Returns:
      A dict mapping normalized mount point -> (device, filesystem type).
    """
    expiry = 60  # 1 min

    insert_time = MOUNTPOINT_CACHE[0]
    if insert_time + expiry > time.time():
        return MOUNTPOINT_CACHE[1]

    devices = {}

    # Check all the mounted filesystems.
    if data is None:
        contents = []
        for path in ["/proc/mounts", "/etc/mtab"]:
            # Use a context manager so the file handles are closed
            # deterministically instead of leaking until GC.
            with open(path, "rb") as fd:
                contents.append(fd.read())
        data = "\n".join(contents)

    for line in data.splitlines():
        try:
            device, mnt_point, fs_type, _ = line.split(" ", 3)
            mnt_point = os.path.normpath(mnt_point)
            # What if several devices are mounted on the same mount point?
            devices[mnt_point] = (device, fs_type)
        except ValueError:
            # Skip malformed lines with fewer than four fields.
            pass

    MOUNTPOINT_CACHE[0] = time.time()
    MOUNTPOINT_CACHE[1] = devices
    return devices
def function[GetMountpoints, parameter[data]]: constant[List all the filesystems mounted on the system.] variable[expiry] assign[=] constant[60] variable[insert_time] assign[=] call[name[MOUNTPOINT_CACHE]][constant[0]] if compare[binary_operation[name[insert_time] + name[expiry]] greater[>] call[name[time].time, parameter[]]] begin[:] return[call[name[MOUNTPOINT_CACHE]][constant[1]]] variable[devices] assign[=] dictionary[[], []] if compare[name[data] is constant[None]] begin[:] variable[data] assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da18fe93100>]] for taget[name[line]] in starred[call[name[data].splitlines, parameter[]]] begin[:] <ast.Try object at 0x7da18fe93ee0> call[name[MOUNTPOINT_CACHE]][constant[0]] assign[=] call[name[time].time, parameter[]] call[name[MOUNTPOINT_CACHE]][constant[1]] assign[=] name[devices] return[name[devices]]
keyword[def] identifier[GetMountpoints] ( identifier[data] = keyword[None] ): literal[string] identifier[expiry] = literal[int] identifier[insert_time] = identifier[MOUNTPOINT_CACHE] [ literal[int] ] keyword[if] identifier[insert_time] + identifier[expiry] > identifier[time] . identifier[time] (): keyword[return] identifier[MOUNTPOINT_CACHE] [ literal[int] ] identifier[devices] ={} keyword[if] identifier[data] keyword[is] keyword[None] : identifier[data] = literal[string] . identifier[join] ( [ identifier[open] ( identifier[x] , literal[string] ). identifier[read] () keyword[for] identifier[x] keyword[in] [ literal[string] , literal[string] ]]) keyword[for] identifier[line] keyword[in] identifier[data] . identifier[splitlines] (): keyword[try] : identifier[device] , identifier[mnt_point] , identifier[fs_type] , identifier[_] = identifier[line] . identifier[split] ( literal[string] , literal[int] ) identifier[mnt_point] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[mnt_point] ) identifier[devices] [ identifier[mnt_point] ]=( identifier[device] , identifier[fs_type] ) keyword[except] identifier[ValueError] : keyword[pass] identifier[MOUNTPOINT_CACHE] [ literal[int] ]= identifier[time] . identifier[time] () identifier[MOUNTPOINT_CACHE] [ literal[int] ]= identifier[devices] keyword[return] identifier[devices]
def GetMountpoints(data=None): """List all the filesystems mounted on the system.""" expiry = 60 # 1 min insert_time = MOUNTPOINT_CACHE[0] if insert_time + expiry > time.time(): return MOUNTPOINT_CACHE[1] # depends on [control=['if'], data=[]] devices = {} # Check all the mounted filesystems. if data is None: data = '\n'.join([open(x, 'rb').read() for x in ['/proc/mounts', '/etc/mtab']]) # depends on [control=['if'], data=['data']] for line in data.splitlines(): try: (device, mnt_point, fs_type, _) = line.split(' ', 3) mnt_point = os.path.normpath(mnt_point) # What if several devices are mounted on the same mount point? devices[mnt_point] = (device, fs_type) # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['line']] MOUNTPOINT_CACHE[0] = time.time() MOUNTPOINT_CACHE[1] = devices return devices
def from_value(self, instance, value):
    """
    Convert the given value using the set `type_` and store it into
    `instance`’ attribute.
    """
    try:
        parsed_value = self.type_.parse(value)
    except (TypeError, ValueError):
        # Parsing failed: either treat the value as absent or propagate.
        if not self.erroneous_as_absent:
            raise
        return False
    self._set_from_recv(instance, parsed_value)
    return True
def function[from_value, parameter[self, instance, value]]: constant[ Convert the given value using the set `type_` and store it into `instance`’ attribute. ] <ast.Try object at 0x7da20c6e4d90> call[name[self]._set_from_recv, parameter[name[instance], name[parsed]]] return[constant[True]]
keyword[def] identifier[from_value] ( identifier[self] , identifier[instance] , identifier[value] ): literal[string] keyword[try] : identifier[parsed] = identifier[self] . identifier[type_] . identifier[parse] ( identifier[value] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[if] identifier[self] . identifier[erroneous_as_absent] : keyword[return] keyword[False] keyword[raise] identifier[self] . identifier[_set_from_recv] ( identifier[instance] , identifier[parsed] ) keyword[return] keyword[True]
def from_value(self, instance, value): """ Convert the given value using the set `type_` and store it into `instance`’ attribute. """ try: parsed = self.type_.parse(value) # depends on [control=['try'], data=[]] except (TypeError, ValueError): if self.erroneous_as_absent: return False # depends on [control=['if'], data=[]] raise # depends on [control=['except'], data=[]] self._set_from_recv(instance, parsed) return True
def zip_cluster(data, k, init=None, max_iters=100):
    """
    Performs hard EM clustering using the zero-inflated Poisson distribution.

    Args:
        data (array): A 2d array- genes x cells
        k (int): Number of clusters
        init (array, optional): Initial centers - genes x k array. Default: None, use kmeans++
        max_iters (int, optional): Maximum number of iterations. Default: 100

    Returns:
        assignments (array): integer assignments of cells to clusters (length cells)
        L (array): Poisson parameter (genes x k)
        M (array): zero-inflation parameter (genes x k)
    """
    genes, cells = data.shape
    init, new_assignments = kmeans_pp(data + eps, k, centers=init)
    centers = np.copy(init)
    M = np.zeros(centers.shape)
    assignments = new_assignments
    # Initial M-step: fit per-cluster ZIP parameters for the kmeans++ labels.
    for c in range(k):
        centers[:, c], M[:, c] = zip_fit_params_mle(data[:, assignments == c])
    for it in range(max_iters):
        # E-step: hard-assign each cell to its max-likelihood cluster.
        lls = zip_ll(data, centers, M)
        new_assignments = np.argmax(lls, 1)
        if np.equal(assignments, new_assignments).all():
            # Converged: assignments did not change.
            return assignments, centers, M
        assignments = new_assignments
        # M-step: refit with the *updated* assignments.  The original code
        # refit using the stale assignments before swapping them in, so
        # the parameters always lagged one EM iteration behind the labels.
        for c in range(k):
            centers[:, c], M[:, c] = zip_fit_params_mle(data[:, assignments == c])
    return assignments, centers, M
def function[zip_cluster, parameter[data, k, init, max_iters]]: constant[ Performs hard EM clustering using the zero-inflated Poisson distribution. Args: data (array): A 2d array- genes x cells k (int): Number of clusters init (array, optional): Initial centers - genes x k array. Default: None, use kmeans++ max_iters (int, optional): Maximum number of iterations. Default: 100 Returns: assignments (array): integer assignments of cells to clusters (length cells) L (array): Poisson parameter (genes x k) M (array): zero-inflation parameter (genes x k) ] <ast.Tuple object at 0x7da1b1be71f0> assign[=] name[data].shape <ast.Tuple object at 0x7da1b1be7880> assign[=] call[name[kmeans_pp], parameter[binary_operation[name[data] + name[eps]], name[k]]] variable[centers] assign[=] call[name[np].copy, parameter[name[init]]] variable[M] assign[=] call[name[np].zeros, parameter[name[centers].shape]] variable[assignments] assign[=] name[new_assignments] for taget[name[c]] in starred[call[name[range], parameter[name[k]]]] begin[:] <ast.Tuple object at 0x7da20c76e290> assign[=] call[name[zip_fit_params_mle], parameter[call[name[data]][tuple[[<ast.Slice object at 0x7da20c76c520>, <ast.Compare object at 0x7da20c76c580>]]]]] for taget[name[it]] in starred[call[name[range], parameter[name[max_iters]]]] begin[:] variable[lls] assign[=] call[name[zip_ll], parameter[name[data], name[centers], name[M]]] variable[new_assignments] assign[=] call[name[np].argmax, parameter[name[lls], constant[1]]] if call[call[name[np].equal, parameter[name[assignments], name[new_assignments]]].all, parameter[]] begin[:] return[tuple[[<ast.Name object at 0x7da1b1bb9060>, <ast.Name object at 0x7da1b1bb94b0>, <ast.Name object at 0x7da1b1bbbd00>]]] for taget[name[c]] in starred[call[name[range], parameter[name[k]]]] begin[:] <ast.Tuple object at 0x7da1b1bb8940> assign[=] call[name[zip_fit_params_mle], parameter[call[name[data]][tuple[[<ast.Slice object at 0x7da1b1be6080>, <ast.Compare object at 
0x7da1b1be7820>]]]]] variable[assignments] assign[=] name[new_assignments] return[tuple[[<ast.Name object at 0x7da1b1be5060>, <ast.Name object at 0x7da1b1be6f50>, <ast.Name object at 0x7da1b1be6260>]]]
keyword[def] identifier[zip_cluster] ( identifier[data] , identifier[k] , identifier[init] = keyword[None] , identifier[max_iters] = literal[int] ): literal[string] identifier[genes] , identifier[cells] = identifier[data] . identifier[shape] identifier[init] , identifier[new_assignments] = identifier[kmeans_pp] ( identifier[data] + identifier[eps] , identifier[k] , identifier[centers] = identifier[init] ) identifier[centers] = identifier[np] . identifier[copy] ( identifier[init] ) identifier[M] = identifier[np] . identifier[zeros] ( identifier[centers] . identifier[shape] ) identifier[assignments] = identifier[new_assignments] keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[k] ): identifier[centers] [:, identifier[c] ], identifier[M] [:, identifier[c] ]= identifier[zip_fit_params_mle] ( identifier[data] [:, identifier[assignments] == identifier[c] ]) keyword[for] identifier[it] keyword[in] identifier[range] ( identifier[max_iters] ): identifier[lls] = identifier[zip_ll] ( identifier[data] , identifier[centers] , identifier[M] ) identifier[new_assignments] = identifier[np] . identifier[argmax] ( identifier[lls] , literal[int] ) keyword[if] identifier[np] . identifier[equal] ( identifier[assignments] , identifier[new_assignments] ). identifier[all] (): keyword[return] identifier[assignments] , identifier[centers] , identifier[M] keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[k] ): identifier[centers] [:, identifier[c] ], identifier[M] [:, identifier[c] ]= identifier[zip_fit_params_mle] ( identifier[data] [:, identifier[assignments] == identifier[c] ]) identifier[assignments] = identifier[new_assignments] keyword[return] identifier[assignments] , identifier[centers] , identifier[M]
def zip_cluster(data, k, init=None, max_iters=100): """ Performs hard EM clustering using the zero-inflated Poisson distribution. Args: data (array): A 2d array- genes x cells k (int): Number of clusters init (array, optional): Initial centers - genes x k array. Default: None, use kmeans++ max_iters (int, optional): Maximum number of iterations. Default: 100 Returns: assignments (array): integer assignments of cells to clusters (length cells) L (array): Poisson parameter (genes x k) M (array): zero-inflation parameter (genes x k) """ (genes, cells) = data.shape (init, new_assignments) = kmeans_pp(data + eps, k, centers=init) centers = np.copy(init) M = np.zeros(centers.shape) assignments = new_assignments for c in range(k): (centers[:, c], M[:, c]) = zip_fit_params_mle(data[:, assignments == c]) # depends on [control=['for'], data=['c']] for it in range(max_iters): lls = zip_ll(data, centers, M) new_assignments = np.argmax(lls, 1) if np.equal(assignments, new_assignments).all(): return (assignments, centers, M) # depends on [control=['if'], data=[]] for c in range(k): (centers[:, c], M[:, c]) = zip_fit_params_mle(data[:, assignments == c]) # depends on [control=['for'], data=['c']] assignments = new_assignments # depends on [control=['for'], data=[]] return (assignments, centers, M)
def pixbuf_to_cairo_png(pixbuf): """Convert from PixBuf to ImageSurface, by going through the PNG format. This method is 10~30x slower than GDK but always works. """ buffer_pointer = ffi.new('gchar **') buffer_size = ffi.new('gsize *') error = ffi.new('GError **') handle_g_error(error, pixbuf.save_to_buffer( buffer_pointer, buffer_size, ffi.new('char[]', b'png'), error, ffi.new('char[]', b'compression'), ffi.new('char[]', b'0'), ffi.NULL)) png_bytes = ffi.buffer(buffer_pointer[0], buffer_size[0]) return ImageSurface.create_from_png(BytesIO(png_bytes))
def function[pixbuf_to_cairo_png, parameter[pixbuf]]: constant[Convert from PixBuf to ImageSurface, by going through the PNG format. This method is 10~30x slower than GDK but always works. ] variable[buffer_pointer] assign[=] call[name[ffi].new, parameter[constant[gchar **]]] variable[buffer_size] assign[=] call[name[ffi].new, parameter[constant[gsize *]]] variable[error] assign[=] call[name[ffi].new, parameter[constant[GError **]]] call[name[handle_g_error], parameter[name[error], call[name[pixbuf].save_to_buffer, parameter[name[buffer_pointer], name[buffer_size], call[name[ffi].new, parameter[constant[char[]], constant[b'png']]], name[error], call[name[ffi].new, parameter[constant[char[]], constant[b'compression']]], call[name[ffi].new, parameter[constant[char[]], constant[b'0']]], name[ffi].NULL]]]] variable[png_bytes] assign[=] call[name[ffi].buffer, parameter[call[name[buffer_pointer]][constant[0]], call[name[buffer_size]][constant[0]]]] return[call[name[ImageSurface].create_from_png, parameter[call[name[BytesIO], parameter[name[png_bytes]]]]]]
keyword[def] identifier[pixbuf_to_cairo_png] ( identifier[pixbuf] ): literal[string] identifier[buffer_pointer] = identifier[ffi] . identifier[new] ( literal[string] ) identifier[buffer_size] = identifier[ffi] . identifier[new] ( literal[string] ) identifier[error] = identifier[ffi] . identifier[new] ( literal[string] ) identifier[handle_g_error] ( identifier[error] , identifier[pixbuf] . identifier[save_to_buffer] ( identifier[buffer_pointer] , identifier[buffer_size] , identifier[ffi] . identifier[new] ( literal[string] , literal[string] ), identifier[error] , identifier[ffi] . identifier[new] ( literal[string] , literal[string] ), identifier[ffi] . identifier[new] ( literal[string] , literal[string] ), identifier[ffi] . identifier[NULL] )) identifier[png_bytes] = identifier[ffi] . identifier[buffer] ( identifier[buffer_pointer] [ literal[int] ], identifier[buffer_size] [ literal[int] ]) keyword[return] identifier[ImageSurface] . identifier[create_from_png] ( identifier[BytesIO] ( identifier[png_bytes] ))
def pixbuf_to_cairo_png(pixbuf): """Convert from PixBuf to ImageSurface, by going through the PNG format. This method is 10~30x slower than GDK but always works. """ buffer_pointer = ffi.new('gchar **') buffer_size = ffi.new('gsize *') error = ffi.new('GError **') handle_g_error(error, pixbuf.save_to_buffer(buffer_pointer, buffer_size, ffi.new('char[]', b'png'), error, ffi.new('char[]', b'compression'), ffi.new('char[]', b'0'), ffi.NULL)) png_bytes = ffi.buffer(buffer_pointer[0], buffer_size[0]) return ImageSurface.create_from_png(BytesIO(png_bytes))
def displayplot(data, plinds, plottype, scaling, fileroot, url_path='http://www.aoc.nrao.edu/~claw/plots'): """ Generate interactive plot """ plotdict = {'dmt': plotdmt, 'norm': plotnorm, 'loc': plotloc, 'stat': plotstat, 'all': plotall} sizedict = {'dmt': [900,500], 'norm': [700, 700], 'loc': [700,700], 'stat': [700,700]} sortinds = sorted(set(plinds['cir'] + plinds['cro'] + plinds['edg'])) sizesrc, plaw = scaling.split('_') data['sizes'] = calcsize(data[sizesrc], inds=sortinds, plaw=int(plaw)) if plottype != 'all': wid, hei = sizedict[plottype] pl = plotdict[plottype](data, circleinds=plinds['cir'], crossinds=plinds['cro'], edgeinds=plinds['edg'], url_path=url_path, fileroot=fileroot, plot_width=wid, plot_height=hei) else: pl = plotall(data, circleinds=plinds['cir'], crossinds=plinds['cro'], edgeinds=plinds['edg'], url_path=url_path, fileroot=fileroot) hdl = show(pl)
def function[displayplot, parameter[data, plinds, plottype, scaling, fileroot, url_path]]: constant[ Generate interactive plot ] variable[plotdict] assign[=] dictionary[[<ast.Constant object at 0x7da1b25d96c0>, <ast.Constant object at 0x7da1b25d9630>, <ast.Constant object at 0x7da1b25d9690>, <ast.Constant object at 0x7da1b25d9660>, <ast.Constant object at 0x7da1b25d9600>], [<ast.Name object at 0x7da1b25d9900>, <ast.Name object at 0x7da1b25d8d00>, <ast.Name object at 0x7da1b25d95a0>, <ast.Name object at 0x7da1b25d93f0>, <ast.Name object at 0x7da1b25d8d30>]] variable[sizedict] assign[=] dictionary[[<ast.Constant object at 0x7da1b25d9420>, <ast.Constant object at 0x7da1b25d9450>, <ast.Constant object at 0x7da1b25d9480>, <ast.Constant object at 0x7da1b25d94b0>], [<ast.List object at 0x7da1b25d94e0>, <ast.List object at 0x7da1b25d98d0>, <ast.List object at 0x7da18dc04be0>, <ast.List object at 0x7da18dc07e20>]] variable[sortinds] assign[=] call[name[sorted], parameter[call[name[set], parameter[binary_operation[binary_operation[call[name[plinds]][constant[cir]] + call[name[plinds]][constant[cro]]] + call[name[plinds]][constant[edg]]]]]]] <ast.Tuple object at 0x7da1b2525570> assign[=] call[name[scaling].split, parameter[constant[_]]] call[name[data]][constant[sizes]] assign[=] call[name[calcsize], parameter[call[name[data]][name[sizesrc]]]] if compare[name[plottype] not_equal[!=] constant[all]] begin[:] <ast.Tuple object at 0x7da1b26c4160> assign[=] call[name[sizedict]][name[plottype]] variable[pl] assign[=] call[call[name[plotdict]][name[plottype]], parameter[name[data]]] variable[hdl] assign[=] call[name[show], parameter[name[pl]]]
keyword[def] identifier[displayplot] ( identifier[data] , identifier[plinds] , identifier[plottype] , identifier[scaling] , identifier[fileroot] , identifier[url_path] = literal[string] ): literal[string] identifier[plotdict] ={ literal[string] : identifier[plotdmt] , literal[string] : identifier[plotnorm] , literal[string] : identifier[plotloc] , literal[string] : identifier[plotstat] , literal[string] : identifier[plotall] } identifier[sizedict] ={ literal[string] :[ literal[int] , literal[int] ], literal[string] :[ literal[int] , literal[int] ], literal[string] :[ literal[int] , literal[int] ], literal[string] :[ literal[int] , literal[int] ]} identifier[sortinds] = identifier[sorted] ( identifier[set] ( identifier[plinds] [ literal[string] ]+ identifier[plinds] [ literal[string] ]+ identifier[plinds] [ literal[string] ])) identifier[sizesrc] , identifier[plaw] = identifier[scaling] . identifier[split] ( literal[string] ) identifier[data] [ literal[string] ]= identifier[calcsize] ( identifier[data] [ identifier[sizesrc] ], identifier[inds] = identifier[sortinds] , identifier[plaw] = identifier[int] ( identifier[plaw] )) keyword[if] identifier[plottype] != literal[string] : identifier[wid] , identifier[hei] = identifier[sizedict] [ identifier[plottype] ] identifier[pl] = identifier[plotdict] [ identifier[plottype] ]( identifier[data] , identifier[circleinds] = identifier[plinds] [ literal[string] ], identifier[crossinds] = identifier[plinds] [ literal[string] ], identifier[edgeinds] = identifier[plinds] [ literal[string] ], identifier[url_path] = identifier[url_path] , identifier[fileroot] = identifier[fileroot] , identifier[plot_width] = identifier[wid] , identifier[plot_height] = identifier[hei] ) keyword[else] : identifier[pl] = identifier[plotall] ( identifier[data] , identifier[circleinds] = identifier[plinds] [ literal[string] ], identifier[crossinds] = identifier[plinds] [ literal[string] ], identifier[edgeinds] = identifier[plinds] [ literal[string] ], 
identifier[url_path] = identifier[url_path] , identifier[fileroot] = identifier[fileroot] ) identifier[hdl] = identifier[show] ( identifier[pl] )
def displayplot(data, plinds, plottype, scaling, fileroot, url_path='http://www.aoc.nrao.edu/~claw/plots'): """ Generate interactive plot """ plotdict = {'dmt': plotdmt, 'norm': plotnorm, 'loc': plotloc, 'stat': plotstat, 'all': plotall} sizedict = {'dmt': [900, 500], 'norm': [700, 700], 'loc': [700, 700], 'stat': [700, 700]} sortinds = sorted(set(plinds['cir'] + plinds['cro'] + plinds['edg'])) (sizesrc, plaw) = scaling.split('_') data['sizes'] = calcsize(data[sizesrc], inds=sortinds, plaw=int(plaw)) if plottype != 'all': (wid, hei) = sizedict[plottype] pl = plotdict[plottype](data, circleinds=plinds['cir'], crossinds=plinds['cro'], edgeinds=plinds['edg'], url_path=url_path, fileroot=fileroot, plot_width=wid, plot_height=hei) # depends on [control=['if'], data=['plottype']] else: pl = plotall(data, circleinds=plinds['cir'], crossinds=plinds['cro'], edgeinds=plinds['edg'], url_path=url_path, fileroot=fileroot) hdl = show(pl)
def extra_data(self): """Load token data stored in token (ignores expiry date of tokens).""" if self.token: return SecretLinkFactory.load_token(self.token, force=True)["data"] return None
def function[extra_data, parameter[self]]: constant[Load token data stored in token (ignores expiry date of tokens).] if name[self].token begin[:] return[call[call[name[SecretLinkFactory].load_token, parameter[name[self].token]]][constant[data]]] return[constant[None]]
keyword[def] identifier[extra_data] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[token] : keyword[return] identifier[SecretLinkFactory] . identifier[load_token] ( identifier[self] . identifier[token] , identifier[force] = keyword[True] )[ literal[string] ] keyword[return] keyword[None]
def extra_data(self): """Load token data stored in token (ignores expiry date of tokens).""" if self.token: return SecretLinkFactory.load_token(self.token, force=True)['data'] # depends on [control=['if'], data=[]] return None
def local_position_ned_cov_encode(self, time_boot_ms, time_utc, estimator_type, x, y, z, vx, vy, vz, ax, ay, az, covariance): ''' The filtered local position (e.g. fused computer vision and accelerometers). Coordinate frame is right-handed, Z-axis down (aeronautical frame, NED / north-east-down convention) time_boot_ms : Timestamp (milliseconds since system boot). 0 for system without monotonic timestamp (uint32_t) time_utc : Timestamp (microseconds since UNIX epoch) in UTC. 0 for unknown. Commonly filled by the precision time source of a GPS receiver. (uint64_t) estimator_type : Class id of the estimator this estimate originated from. (uint8_t) x : X Position (float) y : Y Position (float) z : Z Position (float) vx : X Speed (m/s) (float) vy : Y Speed (m/s) (float) vz : Z Speed (m/s) (float) ax : X Acceleration (m/s^2) (float) ay : Y Acceleration (m/s^2) (float) az : Z Acceleration (m/s^2) (float) covariance : Covariance matrix upper right triangular (first nine entries are the first ROW, next eight entries are the second row, etc.) (float) ''' return MAVLink_local_position_ned_cov_message(time_boot_ms, time_utc, estimator_type, x, y, z, vx, vy, vz, ax, ay, az, covariance)
def function[local_position_ned_cov_encode, parameter[self, time_boot_ms, time_utc, estimator_type, x, y, z, vx, vy, vz, ax, ay, az, covariance]]: constant[ The filtered local position (e.g. fused computer vision and accelerometers). Coordinate frame is right-handed, Z-axis down (aeronautical frame, NED / north-east-down convention) time_boot_ms : Timestamp (milliseconds since system boot). 0 for system without monotonic timestamp (uint32_t) time_utc : Timestamp (microseconds since UNIX epoch) in UTC. 0 for unknown. Commonly filled by the precision time source of a GPS receiver. (uint64_t) estimator_type : Class id of the estimator this estimate originated from. (uint8_t) x : X Position (float) y : Y Position (float) z : Z Position (float) vx : X Speed (m/s) (float) vy : Y Speed (m/s) (float) vz : Z Speed (m/s) (float) ax : X Acceleration (m/s^2) (float) ay : Y Acceleration (m/s^2) (float) az : Z Acceleration (m/s^2) (float) covariance : Covariance matrix upper right triangular (first nine entries are the first ROW, next eight entries are the second row, etc.) (float) ] return[call[name[MAVLink_local_position_ned_cov_message], parameter[name[time_boot_ms], name[time_utc], name[estimator_type], name[x], name[y], name[z], name[vx], name[vy], name[vz], name[ax], name[ay], name[az], name[covariance]]]]
keyword[def] identifier[local_position_ned_cov_encode] ( identifier[self] , identifier[time_boot_ms] , identifier[time_utc] , identifier[estimator_type] , identifier[x] , identifier[y] , identifier[z] , identifier[vx] , identifier[vy] , identifier[vz] , identifier[ax] , identifier[ay] , identifier[az] , identifier[covariance] ): literal[string] keyword[return] identifier[MAVLink_local_position_ned_cov_message] ( identifier[time_boot_ms] , identifier[time_utc] , identifier[estimator_type] , identifier[x] , identifier[y] , identifier[z] , identifier[vx] , identifier[vy] , identifier[vz] , identifier[ax] , identifier[ay] , identifier[az] , identifier[covariance] )
def local_position_ned_cov_encode(self, time_boot_ms, time_utc, estimator_type, x, y, z, vx, vy, vz, ax, ay, az, covariance): """ The filtered local position (e.g. fused computer vision and accelerometers). Coordinate frame is right-handed, Z-axis down (aeronautical frame, NED / north-east-down convention) time_boot_ms : Timestamp (milliseconds since system boot). 0 for system without monotonic timestamp (uint32_t) time_utc : Timestamp (microseconds since UNIX epoch) in UTC. 0 for unknown. Commonly filled by the precision time source of a GPS receiver. (uint64_t) estimator_type : Class id of the estimator this estimate originated from. (uint8_t) x : X Position (float) y : Y Position (float) z : Z Position (float) vx : X Speed (m/s) (float) vy : Y Speed (m/s) (float) vz : Z Speed (m/s) (float) ax : X Acceleration (m/s^2) (float) ay : Y Acceleration (m/s^2) (float) az : Z Acceleration (m/s^2) (float) covariance : Covariance matrix upper right triangular (first nine entries are the first ROW, next eight entries are the second row, etc.) (float) """ return MAVLink_local_position_ned_cov_message(time_boot_ms, time_utc, estimator_type, x, y, z, vx, vy, vz, ax, ay, az, covariance)
def serialize_operator_greater_than(self, op): """ Serializer for :meth:`SpiffWorkflow.operators.NotEqual`. Example:: <greater-than> <value>text</value> <value><attribute>foobar</attribute></value> </greater-than> """ elem = etree.Element('greater-than') return self.serialize_value_list(elem, op.args)
def function[serialize_operator_greater_than, parameter[self, op]]: constant[ Serializer for :meth:`SpiffWorkflow.operators.NotEqual`. Example:: <greater-than> <value>text</value> <value><attribute>foobar</attribute></value> </greater-than> ] variable[elem] assign[=] call[name[etree].Element, parameter[constant[greater-than]]] return[call[name[self].serialize_value_list, parameter[name[elem], name[op].args]]]
keyword[def] identifier[serialize_operator_greater_than] ( identifier[self] , identifier[op] ): literal[string] identifier[elem] = identifier[etree] . identifier[Element] ( literal[string] ) keyword[return] identifier[self] . identifier[serialize_value_list] ( identifier[elem] , identifier[op] . identifier[args] )
def serialize_operator_greater_than(self, op): """ Serializer for :meth:`SpiffWorkflow.operators.NotEqual`. Example:: <greater-than> <value>text</value> <value><attribute>foobar</attribute></value> </greater-than> """ elem = etree.Element('greater-than') return self.serialize_value_list(elem, op.args)
def _read_socket(self): """ Process incoming messages from socket. """ while True: base_bytes = self._socket.recv(BASE_SIZE) base = basemessage.parse(base_bytes) payload_bytes = self._socket.recv(base.payload_length) self._handle_message(packet.parse(base_bytes + payload_bytes))
def function[_read_socket, parameter[self]]: constant[ Process incoming messages from socket. ] while constant[True] begin[:] variable[base_bytes] assign[=] call[name[self]._socket.recv, parameter[name[BASE_SIZE]]] variable[base] assign[=] call[name[basemessage].parse, parameter[name[base_bytes]]] variable[payload_bytes] assign[=] call[name[self]._socket.recv, parameter[name[base].payload_length]] call[name[self]._handle_message, parameter[call[name[packet].parse, parameter[binary_operation[name[base_bytes] + name[payload_bytes]]]]]]
keyword[def] identifier[_read_socket] ( identifier[self] ): literal[string] keyword[while] keyword[True] : identifier[base_bytes] = identifier[self] . identifier[_socket] . identifier[recv] ( identifier[BASE_SIZE] ) identifier[base] = identifier[basemessage] . identifier[parse] ( identifier[base_bytes] ) identifier[payload_bytes] = identifier[self] . identifier[_socket] . identifier[recv] ( identifier[base] . identifier[payload_length] ) identifier[self] . identifier[_handle_message] ( identifier[packet] . identifier[parse] ( identifier[base_bytes] + identifier[payload_bytes] ))
def _read_socket(self): """ Process incoming messages from socket. """ while True: base_bytes = self._socket.recv(BASE_SIZE) base = basemessage.parse(base_bytes) payload_bytes = self._socket.recv(base.payload_length) self._handle_message(packet.parse(base_bytes + payload_bytes)) # depends on [control=['while'], data=[]]
def start(self): """ Start the installation wizard """ self.log.debug('Starting the installation process') self.browser.open(self.url) self.system_check()
def function[start, parameter[self]]: constant[ Start the installation wizard ] call[name[self].log.debug, parameter[constant[Starting the installation process]]] call[name[self].browser.open, parameter[name[self].url]] call[name[self].system_check, parameter[]]
keyword[def] identifier[start] ( identifier[self] ): literal[string] identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) identifier[self] . identifier[browser] . identifier[open] ( identifier[self] . identifier[url] ) identifier[self] . identifier[system_check] ()
def start(self): """ Start the installation wizard """ self.log.debug('Starting the installation process') self.browser.open(self.url) self.system_check()
def get_log(name=None): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ if name is None: name = 'oct2py' else: name = 'oct2py.' + name log = logging.getLogger(name) log.setLevel(logging.INFO) return log
def function[get_log, parameter[name]]: constant[Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html ] if compare[name[name] is constant[None]] begin[:] variable[name] assign[=] constant[oct2py] variable[log] assign[=] call[name[logging].getLogger, parameter[name[name]]] call[name[log].setLevel, parameter[name[logging].INFO]] return[name[log]]
keyword[def] identifier[get_log] ( identifier[name] = keyword[None] ): literal[string] keyword[if] identifier[name] keyword[is] keyword[None] : identifier[name] = literal[string] keyword[else] : identifier[name] = literal[string] + identifier[name] identifier[log] = identifier[logging] . identifier[getLogger] ( identifier[name] ) identifier[log] . identifier[setLevel] ( identifier[logging] . identifier[INFO] ) keyword[return] identifier[log]
def get_log(name=None): """Return a console logger. Output may be sent to the logger using the `debug`, `info`, `warning`, `error` and `critical` methods. Parameters ---------- name : str Name of the log. References ---------- .. [1] Logging facility for Python, http://docs.python.org/library/logging.html """ if name is None: name = 'oct2py' # depends on [control=['if'], data=['name']] else: name = 'oct2py.' + name log = logging.getLogger(name) log.setLevel(logging.INFO) return log
def import_image(self, image_id, region_name): ''' a method to import an image from another AWS region https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/CopyingAMIs.html REQUIRED: aws credentials must have valid access to both regions :param image_id: string with AWS id of source image :param region_name: string with AWS region of source image :return: string with AWS id of new image ''' title = '%s.import_image' % self.__class__.__name__ # validate inputs input_fields = { 'image_id': image_id } for key, value in input_fields.items(): object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) input_fields = { 'region_name': region_name } for key, value in input_fields.items(): object_title = '%s(%s=%s)' % (title, key, str(value)) self.iam.fields.validate(value, '.%s' % key, object_title) if region_name == self.iam.region_name: raise ValueError('%s cannot import an image from the same region.' % title) # construct ec2 client connection for source region client_kwargs = { 'service_name': 'ec2', 'region_name': region_name, 'aws_access_key_id': self.iam.access_id, 'aws_secret_access_key': self.iam.secret_key } source_connection = boto3.client(**client_kwargs) # check existence of image try: response = source_connection.describe_images( ImageIds=[ image_id ] ) except: raise ValueError('Image %s does not exist in AWS region %s.' % (image_id, region_name)) if not 'Images' in response.keys(): raise ValueError('Image %s does not exist in AWS region %s.' % (image_id, region_name)) elif not response['Images'][0]: raise ValueError('Image %s does not exist in AWS region %s.' 
% (image_id, region_name)) # check into state of image elif not 'State' in response['Images'][0].keys(): from time import sleep from timeit import default_timer as timer self.iam.printer('Checking into the status of image %s in AWS region %s' % (image_id, region_name), flush=True) state_timeout = 0 while not 'State' in response['Images'][0].keys(): self.iam.printer('.', flush=True) sleep(3) state_timeout += 1 response = source_connection.describe_images( ImageIds=[ image_id ] ) if state_timeout > 3: raise Exception('Failure to determine status of image %s.' % image_id) self.iam.printer(' done.') image_state = response['Images'][0]['State'] # raise error if image is deregistered or otherwise invalid if image_state == 'deregistered' or image_state == 'invalid' or image_state == 'transient' or image_state == 'failed': raise Exception('Image %s in AWS region %s is %s.' % (image_id, region_name, image_state)) # wait while image is pending elif image_state == 'pending': from time import sleep from timeit import default_timer as timer self.iam.printer('Image %s is %s' % (image_id, image_state), flush=True) delay = 3 state_timeout = 0 while image_state != 'available': self.iam.printer('.', flush=True) sleep(delay) t3 = timer() response = source_connection.describe_images( ImageIds=[ image_id ] ) t4 = timer() state_timeout += 1 response_time = t4 - t3 if 3 - response_time > 0: delay = 3 - response_time else: delay = 0 if state_timeout > 300: raise Exception('Timeout. 
Failure initializing image %s in region %s in less than 15min' % (image_id, region_name)) image_state = response['Images'][0]['State'] self.iam.printer(' done.') # discover tags and name associated with source image try: response = source_connection.describe_images( ImageIds=[ image_id ] ) except: raise AWSConnectionError(title) image_info = response['Images'][0] # construct image details from response image_name = image_info['Name'] tag_list = self.iam.ingest(image_info['Tags']) # copy image over to current region self.iam.printer('Copying image %s from region %s.' % (image_id, region_name)) try: response = self.connection.copy_image( SourceRegion=region_name, SourceImageId=image_id, Name=image_name ) except: raise AWSConnectionError new_id = response['ImageId'] # check into state of new image self.check_image_state(new_id, wait=False) # add tags from source image to new image self.tag_image(new_id, tag_list) self.iam.printer('Tags from image %s have been added to image %s.' % (image_id, new_id)) return new_id
def function[import_image, parameter[self, image_id, region_name]]: constant[ a method to import an image from another AWS region https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/CopyingAMIs.html REQUIRED: aws credentials must have valid access to both regions :param image_id: string with AWS id of source image :param region_name: string with AWS region of source image :return: string with AWS id of new image ] variable[title] assign[=] binary_operation[constant[%s.import_image] <ast.Mod object at 0x7da2590d6920> name[self].__class__.__name__] variable[input_fields] assign[=] dictionary[[<ast.Constant object at 0x7da20c990d60>], [<ast.Name object at 0x7da20c993400>]] for taget[tuple[[<ast.Name object at 0x7da20c9926b0>, <ast.Name object at 0x7da20c9939d0>]]] in starred[call[name[input_fields].items, parameter[]]] begin[:] variable[object_title] assign[=] binary_operation[constant[%s(%s=%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c992d70>, <ast.Name object at 0x7da20c991fc0>, <ast.Call object at 0x7da20c991690>]]] call[name[self].fields.validate, parameter[name[value], binary_operation[constant[.%s] <ast.Mod object at 0x7da2590d6920> name[key]], name[object_title]]] variable[input_fields] assign[=] dictionary[[<ast.Constant object at 0x7da20c991240>], [<ast.Name object at 0x7da20c993d00>]] for taget[tuple[[<ast.Name object at 0x7da20c991720>, <ast.Name object at 0x7da20c9936a0>]]] in starred[call[name[input_fields].items, parameter[]]] begin[:] variable[object_title] assign[=] binary_operation[constant[%s(%s=%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c9924a0>, <ast.Name object at 0x7da20c9937f0>, <ast.Call object at 0x7da20c9918a0>]]] call[name[self].iam.fields.validate, parameter[name[value], binary_operation[constant[.%s] <ast.Mod object at 0x7da2590d6920> name[key]], name[object_title]]] if compare[name[region_name] equal[==] name[self].iam.region_name] begin[:] <ast.Raise object at 0x7da20c9914b0> 
variable[client_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da20c9929b0>, <ast.Constant object at 0x7da20c993580>, <ast.Constant object at 0x7da20c9932e0>, <ast.Constant object at 0x7da20c993250>], [<ast.Constant object at 0x7da20c9928c0>, <ast.Name object at 0x7da20c990e50>, <ast.Attribute object at 0x7da20c992a10>, <ast.Attribute object at 0x7da20c9933d0>]] variable[source_connection] assign[=] call[name[boto3].client, parameter[]] <ast.Try object at 0x7da20c991570> if <ast.UnaryOp object at 0x7da20c990700> begin[:] <ast.Raise object at 0x7da20c993310> variable[image_state] assign[=] call[call[call[name[response]][constant[Images]]][constant[0]]][constant[State]] if <ast.BoolOp object at 0x7da18eb54be0> begin[:] <ast.Raise object at 0x7da18eb55600> <ast.Try object at 0x7da18eb549a0> variable[image_info] assign[=] call[call[name[response]][constant[Images]]][constant[0]] variable[image_name] assign[=] call[name[image_info]][constant[Name]] variable[tag_list] assign[=] call[name[self].iam.ingest, parameter[call[name[image_info]][constant[Tags]]]] call[name[self].iam.printer, parameter[binary_operation[constant[Copying image %s from region %s.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb560e0>, <ast.Name object at 0x7da18eb54af0>]]]]] <ast.Try object at 0x7da18eb57340> variable[new_id] assign[=] call[name[response]][constant[ImageId]] call[name[self].check_image_state, parameter[name[new_id]]] call[name[self].tag_image, parameter[name[new_id], name[tag_list]]] call[name[self].iam.printer, parameter[binary_operation[constant[Tags from image %s have been added to image %s.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb54940>, <ast.Name object at 0x7da18eb57e20>]]]]] return[name[new_id]]
keyword[def] identifier[import_image] ( identifier[self] , identifier[image_id] , identifier[region_name] ): literal[string] identifier[title] = literal[string] % identifier[self] . identifier[__class__] . identifier[__name__] identifier[input_fields] ={ literal[string] : identifier[image_id] } keyword[for] identifier[key] , identifier[value] keyword[in] identifier[input_fields] . identifier[items] (): identifier[object_title] = literal[string] %( identifier[title] , identifier[key] , identifier[str] ( identifier[value] )) identifier[self] . identifier[fields] . identifier[validate] ( identifier[value] , literal[string] % identifier[key] , identifier[object_title] ) identifier[input_fields] ={ literal[string] : identifier[region_name] } keyword[for] identifier[key] , identifier[value] keyword[in] identifier[input_fields] . identifier[items] (): identifier[object_title] = literal[string] %( identifier[title] , identifier[key] , identifier[str] ( identifier[value] )) identifier[self] . identifier[iam] . identifier[fields] . identifier[validate] ( identifier[value] , literal[string] % identifier[key] , identifier[object_title] ) keyword[if] identifier[region_name] == identifier[self] . identifier[iam] . identifier[region_name] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[title] ) identifier[client_kwargs] ={ literal[string] : literal[string] , literal[string] : identifier[region_name] , literal[string] : identifier[self] . identifier[iam] . identifier[access_id] , literal[string] : identifier[self] . identifier[iam] . identifier[secret_key] } identifier[source_connection] = identifier[boto3] . identifier[client] (** identifier[client_kwargs] ) keyword[try] : identifier[response] = identifier[source_connection] . 
identifier[describe_images] ( identifier[ImageIds] =[ identifier[image_id] ] ) keyword[except] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[image_id] , identifier[region_name] )) keyword[if] keyword[not] literal[string] keyword[in] identifier[response] . identifier[keys] (): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[image_id] , identifier[region_name] )) keyword[elif] keyword[not] identifier[response] [ literal[string] ][ literal[int] ]: keyword[raise] identifier[ValueError] ( literal[string] %( identifier[image_id] , identifier[region_name] )) keyword[elif] keyword[not] literal[string] keyword[in] identifier[response] [ literal[string] ][ literal[int] ]. identifier[keys] (): keyword[from] identifier[time] keyword[import] identifier[sleep] keyword[from] identifier[timeit] keyword[import] identifier[default_timer] keyword[as] identifier[timer] identifier[self] . identifier[iam] . identifier[printer] ( literal[string] %( identifier[image_id] , identifier[region_name] ), identifier[flush] = keyword[True] ) identifier[state_timeout] = literal[int] keyword[while] keyword[not] literal[string] keyword[in] identifier[response] [ literal[string] ][ literal[int] ]. identifier[keys] (): identifier[self] . identifier[iam] . identifier[printer] ( literal[string] , identifier[flush] = keyword[True] ) identifier[sleep] ( literal[int] ) identifier[state_timeout] += literal[int] identifier[response] = identifier[source_connection] . identifier[describe_images] ( identifier[ImageIds] =[ identifier[image_id] ] ) keyword[if] identifier[state_timeout] > literal[int] : keyword[raise] identifier[Exception] ( literal[string] % identifier[image_id] ) identifier[self] . identifier[iam] . 
identifier[printer] ( literal[string] ) identifier[image_state] = identifier[response] [ literal[string] ][ literal[int] ][ literal[string] ] keyword[if] identifier[image_state] == literal[string] keyword[or] identifier[image_state] == literal[string] keyword[or] identifier[image_state] == literal[string] keyword[or] identifier[image_state] == literal[string] : keyword[raise] identifier[Exception] ( literal[string] %( identifier[image_id] , identifier[region_name] , identifier[image_state] )) keyword[elif] identifier[image_state] == literal[string] : keyword[from] identifier[time] keyword[import] identifier[sleep] keyword[from] identifier[timeit] keyword[import] identifier[default_timer] keyword[as] identifier[timer] identifier[self] . identifier[iam] . identifier[printer] ( literal[string] %( identifier[image_id] , identifier[image_state] ), identifier[flush] = keyword[True] ) identifier[delay] = literal[int] identifier[state_timeout] = literal[int] keyword[while] identifier[image_state] != literal[string] : identifier[self] . identifier[iam] . identifier[printer] ( literal[string] , identifier[flush] = keyword[True] ) identifier[sleep] ( identifier[delay] ) identifier[t3] = identifier[timer] () identifier[response] = identifier[source_connection] . identifier[describe_images] ( identifier[ImageIds] =[ identifier[image_id] ] ) identifier[t4] = identifier[timer] () identifier[state_timeout] += literal[int] identifier[response_time] = identifier[t4] - identifier[t3] keyword[if] literal[int] - identifier[response_time] > literal[int] : identifier[delay] = literal[int] - identifier[response_time] keyword[else] : identifier[delay] = literal[int] keyword[if] identifier[state_timeout] > literal[int] : keyword[raise] identifier[Exception] ( literal[string] %( identifier[image_id] , identifier[region_name] )) identifier[image_state] = identifier[response] [ literal[string] ][ literal[int] ][ literal[string] ] identifier[self] . identifier[iam] . 
identifier[printer] ( literal[string] ) keyword[try] : identifier[response] = identifier[source_connection] . identifier[describe_images] ( identifier[ImageIds] =[ identifier[image_id] ] ) keyword[except] : keyword[raise] identifier[AWSConnectionError] ( identifier[title] ) identifier[image_info] = identifier[response] [ literal[string] ][ literal[int] ] identifier[image_name] = identifier[image_info] [ literal[string] ] identifier[tag_list] = identifier[self] . identifier[iam] . identifier[ingest] ( identifier[image_info] [ literal[string] ]) identifier[self] . identifier[iam] . identifier[printer] ( literal[string] %( identifier[image_id] , identifier[region_name] )) keyword[try] : identifier[response] = identifier[self] . identifier[connection] . identifier[copy_image] ( identifier[SourceRegion] = identifier[region_name] , identifier[SourceImageId] = identifier[image_id] , identifier[Name] = identifier[image_name] ) keyword[except] : keyword[raise] identifier[AWSConnectionError] identifier[new_id] = identifier[response] [ literal[string] ] identifier[self] . identifier[check_image_state] ( identifier[new_id] , identifier[wait] = keyword[False] ) identifier[self] . identifier[tag_image] ( identifier[new_id] , identifier[tag_list] ) identifier[self] . identifier[iam] . identifier[printer] ( literal[string] %( identifier[image_id] , identifier[new_id] )) keyword[return] identifier[new_id]
def import_image(self, image_id, region_name): """ a method to import an image from another AWS region https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/CopyingAMIs.html REQUIRED: aws credentials must have valid access to both regions :param image_id: string with AWS id of source image :param region_name: string with AWS region of source image :return: string with AWS id of new image """ title = '%s.import_image' % self.__class__.__name__ # validate inputs input_fields = {'image_id': image_id} for (key, value) in input_fields.items(): object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # depends on [control=['for'], data=[]] input_fields = {'region_name': region_name} for (key, value) in input_fields.items(): object_title = '%s(%s=%s)' % (title, key, str(value)) self.iam.fields.validate(value, '.%s' % key, object_title) # depends on [control=['for'], data=[]] if region_name == self.iam.region_name: raise ValueError('%s cannot import an image from the same region.' % title) # depends on [control=['if'], data=[]] # construct ec2 client connection for source region client_kwargs = {'service_name': 'ec2', 'region_name': region_name, 'aws_access_key_id': self.iam.access_id, 'aws_secret_access_key': self.iam.secret_key} source_connection = boto3.client(**client_kwargs) # check existence of image try: response = source_connection.describe_images(ImageIds=[image_id]) # depends on [control=['try'], data=[]] except: raise ValueError('Image %s does not exist in AWS region %s.' % (image_id, region_name)) # depends on [control=['except'], data=[]] if not 'Images' in response.keys(): raise ValueError('Image %s does not exist in AWS region %s.' % (image_id, region_name)) # depends on [control=['if'], data=[]] elif not response['Images'][0]: raise ValueError('Image %s does not exist in AWS region %s.' 
% (image_id, region_name)) # depends on [control=['if'], data=[]] # check into state of image elif not 'State' in response['Images'][0].keys(): from time import sleep from timeit import default_timer as timer self.iam.printer('Checking into the status of image %s in AWS region %s' % (image_id, region_name), flush=True) state_timeout = 0 while not 'State' in response['Images'][0].keys(): self.iam.printer('.', flush=True) sleep(3) state_timeout += 1 response = source_connection.describe_images(ImageIds=[image_id]) if state_timeout > 3: raise Exception('Failure to determine status of image %s.' % image_id) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] self.iam.printer(' done.') # depends on [control=['if'], data=[]] image_state = response['Images'][0]['State'] # raise error if image is deregistered or otherwise invalid if image_state == 'deregistered' or image_state == 'invalid' or image_state == 'transient' or (image_state == 'failed'): raise Exception('Image %s in AWS region %s is %s.' % (image_id, region_name, image_state)) # depends on [control=['if'], data=[]] # wait while image is pending elif image_state == 'pending': from time import sleep from timeit import default_timer as timer self.iam.printer('Image %s is %s' % (image_id, image_state), flush=True) delay = 3 state_timeout = 0 while image_state != 'available': self.iam.printer('.', flush=True) sleep(delay) t3 = timer() response = source_connection.describe_images(ImageIds=[image_id]) t4 = timer() state_timeout += 1 response_time = t4 - t3 if 3 - response_time > 0: delay = 3 - response_time # depends on [control=['if'], data=[]] else: delay = 0 if state_timeout > 300: raise Exception('Timeout. 
Failure initializing image %s in region %s in less than 15min' % (image_id, region_name)) # depends on [control=['if'], data=[]] image_state = response['Images'][0]['State'] # depends on [control=['while'], data=['image_state']] self.iam.printer(' done.') # depends on [control=['if'], data=['image_state']] # discover tags and name associated with source image try: response = source_connection.describe_images(ImageIds=[image_id]) # depends on [control=['try'], data=[]] except: raise AWSConnectionError(title) # depends on [control=['except'], data=[]] image_info = response['Images'][0] # construct image details from response image_name = image_info['Name'] tag_list = self.iam.ingest(image_info['Tags']) # copy image over to current region self.iam.printer('Copying image %s from region %s.' % (image_id, region_name)) try: response = self.connection.copy_image(SourceRegion=region_name, SourceImageId=image_id, Name=image_name) # depends on [control=['try'], data=[]] except: raise AWSConnectionError # depends on [control=['except'], data=[]] new_id = response['ImageId'] # check into state of new image self.check_image_state(new_id, wait=False) # add tags from source image to new image self.tag_image(new_id, tag_list) self.iam.printer('Tags from image %s have been added to image %s.' % (image_id, new_id)) return new_id
def cli(env, identifier, keys, permissions, hardware, virtual, logins, events): """User details.""" mgr = SoftLayer.UserManager(env.client) user_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'username') object_mask = "userStatus[name], parent[id, username], apiAuthenticationKeys[authenticationKey], "\ "unsuccessfulLogins, successfulLogins" user = mgr.get_user(user_id, object_mask) env.fout(basic_info(user, keys)) if permissions: perms = mgr.get_user_permissions(user_id) env.fout(print_permissions(perms)) if hardware: mask = "id, hardware, dedicatedHosts" access = mgr.get_user(user_id, mask) env.fout(print_dedicated_access(access.get('dedicatedHosts', []))) env.fout(print_access(access.get('hardware', []), 'Hardware')) if virtual: mask = "id, virtualGuests" access = mgr.get_user(user_id, mask) env.fout(print_access(access.get('virtualGuests', []), 'Virtual Guests')) if logins: login_log = mgr.get_logins(user_id) env.fout(print_logins(login_log)) if events: event_log = mgr.get_events(user_id) env.fout(print_events(event_log))
def function[cli, parameter[env, identifier, keys, permissions, hardware, virtual, logins, events]]: constant[User details.] variable[mgr] assign[=] call[name[SoftLayer].UserManager, parameter[name[env].client]] variable[user_id] assign[=] call[name[helpers].resolve_id, parameter[name[mgr].resolve_ids, name[identifier], constant[username]]] variable[object_mask] assign[=] constant[userStatus[name], parent[id, username], apiAuthenticationKeys[authenticationKey], unsuccessfulLogins, successfulLogins] variable[user] assign[=] call[name[mgr].get_user, parameter[name[user_id], name[object_mask]]] call[name[env].fout, parameter[call[name[basic_info], parameter[name[user], name[keys]]]]] if name[permissions] begin[:] variable[perms] assign[=] call[name[mgr].get_user_permissions, parameter[name[user_id]]] call[name[env].fout, parameter[call[name[print_permissions], parameter[name[perms]]]]] if name[hardware] begin[:] variable[mask] assign[=] constant[id, hardware, dedicatedHosts] variable[access] assign[=] call[name[mgr].get_user, parameter[name[user_id], name[mask]]] call[name[env].fout, parameter[call[name[print_dedicated_access], parameter[call[name[access].get, parameter[constant[dedicatedHosts], list[[]]]]]]]] call[name[env].fout, parameter[call[name[print_access], parameter[call[name[access].get, parameter[constant[hardware], list[[]]]], constant[Hardware]]]]] if name[virtual] begin[:] variable[mask] assign[=] constant[id, virtualGuests] variable[access] assign[=] call[name[mgr].get_user, parameter[name[user_id], name[mask]]] call[name[env].fout, parameter[call[name[print_access], parameter[call[name[access].get, parameter[constant[virtualGuests], list[[]]]], constant[Virtual Guests]]]]] if name[logins] begin[:] variable[login_log] assign[=] call[name[mgr].get_logins, parameter[name[user_id]]] call[name[env].fout, parameter[call[name[print_logins], parameter[name[login_log]]]]] if name[events] begin[:] variable[event_log] assign[=] call[name[mgr].get_events, 
parameter[name[user_id]]] call[name[env].fout, parameter[call[name[print_events], parameter[name[event_log]]]]]
keyword[def] identifier[cli] ( identifier[env] , identifier[identifier] , identifier[keys] , identifier[permissions] , identifier[hardware] , identifier[virtual] , identifier[logins] , identifier[events] ): literal[string] identifier[mgr] = identifier[SoftLayer] . identifier[UserManager] ( identifier[env] . identifier[client] ) identifier[user_id] = identifier[helpers] . identifier[resolve_id] ( identifier[mgr] . identifier[resolve_ids] , identifier[identifier] , literal[string] ) identifier[object_mask] = literal[string] literal[string] identifier[user] = identifier[mgr] . identifier[get_user] ( identifier[user_id] , identifier[object_mask] ) identifier[env] . identifier[fout] ( identifier[basic_info] ( identifier[user] , identifier[keys] )) keyword[if] identifier[permissions] : identifier[perms] = identifier[mgr] . identifier[get_user_permissions] ( identifier[user_id] ) identifier[env] . identifier[fout] ( identifier[print_permissions] ( identifier[perms] )) keyword[if] identifier[hardware] : identifier[mask] = literal[string] identifier[access] = identifier[mgr] . identifier[get_user] ( identifier[user_id] , identifier[mask] ) identifier[env] . identifier[fout] ( identifier[print_dedicated_access] ( identifier[access] . identifier[get] ( literal[string] ,[]))) identifier[env] . identifier[fout] ( identifier[print_access] ( identifier[access] . identifier[get] ( literal[string] ,[]), literal[string] )) keyword[if] identifier[virtual] : identifier[mask] = literal[string] identifier[access] = identifier[mgr] . identifier[get_user] ( identifier[user_id] , identifier[mask] ) identifier[env] . identifier[fout] ( identifier[print_access] ( identifier[access] . identifier[get] ( literal[string] ,[]), literal[string] )) keyword[if] identifier[logins] : identifier[login_log] = identifier[mgr] . identifier[get_logins] ( identifier[user_id] ) identifier[env] . 
identifier[fout] ( identifier[print_logins] ( identifier[login_log] )) keyword[if] identifier[events] : identifier[event_log] = identifier[mgr] . identifier[get_events] ( identifier[user_id] ) identifier[env] . identifier[fout] ( identifier[print_events] ( identifier[event_log] ))
def cli(env, identifier, keys, permissions, hardware, virtual, logins, events): """User details.""" mgr = SoftLayer.UserManager(env.client) user_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'username') object_mask = 'userStatus[name], parent[id, username], apiAuthenticationKeys[authenticationKey], unsuccessfulLogins, successfulLogins' user = mgr.get_user(user_id, object_mask) env.fout(basic_info(user, keys)) if permissions: perms = mgr.get_user_permissions(user_id) env.fout(print_permissions(perms)) # depends on [control=['if'], data=[]] if hardware: mask = 'id, hardware, dedicatedHosts' access = mgr.get_user(user_id, mask) env.fout(print_dedicated_access(access.get('dedicatedHosts', []))) env.fout(print_access(access.get('hardware', []), 'Hardware')) # depends on [control=['if'], data=[]] if virtual: mask = 'id, virtualGuests' access = mgr.get_user(user_id, mask) env.fout(print_access(access.get('virtualGuests', []), 'Virtual Guests')) # depends on [control=['if'], data=[]] if logins: login_log = mgr.get_logins(user_id) env.fout(print_logins(login_log)) # depends on [control=['if'], data=[]] if events: event_log = mgr.get_events(user_id) env.fout(print_events(event_log)) # depends on [control=['if'], data=[]]
def get_authc_info(self, identifier, session=None): """ If an Account requires credentials from multiple data stores, this AccountStore is responsible for aggregating them (composite) and returning the results in a single account object. :returns: a dict of account attributes """ user = self._get_user_query(session, identifier).first() creds = self._get_credential_query(session, identifier).all() if not creds: return None authc_info = {cred_type: {'credential': cred_value, 'failed_attempts': []} for cred_type, cred_value in creds} if 'totp_key' in authc_info: authc_info['totp_key']['2fa_info'] = {'phone_number': user.phone_number} return dict(account_locked=user.account_lock_millis, authc_info=authc_info)
def function[get_authc_info, parameter[self, identifier, session]]: constant[ If an Account requires credentials from multiple data stores, this AccountStore is responsible for aggregating them (composite) and returning the results in a single account object. :returns: a dict of account attributes ] variable[user] assign[=] call[call[name[self]._get_user_query, parameter[name[session], name[identifier]]].first, parameter[]] variable[creds] assign[=] call[call[name[self]._get_credential_query, parameter[name[session], name[identifier]]].all, parameter[]] if <ast.UnaryOp object at 0x7da20c6c7700> begin[:] return[constant[None]] variable[authc_info] assign[=] <ast.DictComp object at 0x7da20c6c62f0> if compare[constant[totp_key] in name[authc_info]] begin[:] call[call[name[authc_info]][constant[totp_key]]][constant[2fa_info]] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c7310>], [<ast.Attribute object at 0x7da20c6c4c70>]] return[call[name[dict], parameter[]]]
keyword[def] identifier[get_authc_info] ( identifier[self] , identifier[identifier] , identifier[session] = keyword[None] ): literal[string] identifier[user] = identifier[self] . identifier[_get_user_query] ( identifier[session] , identifier[identifier] ). identifier[first] () identifier[creds] = identifier[self] . identifier[_get_credential_query] ( identifier[session] , identifier[identifier] ). identifier[all] () keyword[if] keyword[not] identifier[creds] : keyword[return] keyword[None] identifier[authc_info] ={ identifier[cred_type] :{ literal[string] : identifier[cred_value] , literal[string] :[]} keyword[for] identifier[cred_type] , identifier[cred_value] keyword[in] identifier[creds] } keyword[if] literal[string] keyword[in] identifier[authc_info] : identifier[authc_info] [ literal[string] ][ literal[string] ]={ literal[string] : identifier[user] . identifier[phone_number] } keyword[return] identifier[dict] ( identifier[account_locked] = identifier[user] . identifier[account_lock_millis] , identifier[authc_info] = identifier[authc_info] )
def get_authc_info(self, identifier, session=None): """ If an Account requires credentials from multiple data stores, this AccountStore is responsible for aggregating them (composite) and returning the results in a single account object. :returns: a dict of account attributes """ user = self._get_user_query(session, identifier).first() creds = self._get_credential_query(session, identifier).all() if not creds: return None # depends on [control=['if'], data=[]] authc_info = {cred_type: {'credential': cred_value, 'failed_attempts': []} for (cred_type, cred_value) in creds} if 'totp_key' in authc_info: authc_info['totp_key']['2fa_info'] = {'phone_number': user.phone_number} # depends on [control=['if'], data=['authc_info']] return dict(account_locked=user.account_lock_millis, authc_info=authc_info)
def register_parser(self, type, parser, **meta): """Registers a parser of a format. :param type: The unique name of the format :param parser: The method to parse data as the format :param meta: The extra information associated with the format """ try: self.registered_formats[type]['parser'] = parser except KeyError: self.registered_formats[type] = {'parser': parser} if meta: self.register_meta(type, **meta)
def function[register_parser, parameter[self, type, parser]]: constant[Registers a parser of a format. :param type: The unique name of the format :param parser: The method to parse data as the format :param meta: The extra information associated with the format ] <ast.Try object at 0x7da204345900> if name[meta] begin[:] call[name[self].register_meta, parameter[name[type]]]
keyword[def] identifier[register_parser] ( identifier[self] , identifier[type] , identifier[parser] ,** identifier[meta] ): literal[string] keyword[try] : identifier[self] . identifier[registered_formats] [ identifier[type] ][ literal[string] ]= identifier[parser] keyword[except] identifier[KeyError] : identifier[self] . identifier[registered_formats] [ identifier[type] ]={ literal[string] : identifier[parser] } keyword[if] identifier[meta] : identifier[self] . identifier[register_meta] ( identifier[type] ,** identifier[meta] )
def register_parser(self, type, parser, **meta): """Registers a parser of a format. :param type: The unique name of the format :param parser: The method to parse data as the format :param meta: The extra information associated with the format """ try: self.registered_formats[type]['parser'] = parser # depends on [control=['try'], data=[]] except KeyError: self.registered_formats[type] = {'parser': parser} # depends on [control=['except'], data=[]] if meta: self.register_meta(type, **meta) # depends on [control=['if'], data=[]]
def match_keyword(self, keyword, string_match_type=DEFAULT_STRING_MATCH_TYPE, match=True): """Adds a keyword to match. Multiple keywords can be added to perform a boolean ``OR`` among them. A keyword may be applied to any of the elements defined in this object such as the display name, description or any method defined in an interface implemented by this object. arg: keyword (string): keyword to match arg: string_match_type (osid.type.Type): the string match type arg: match (boolean): ``true`` for a positive match, ``false`` for a negative match raise: InvalidArgument - ``keyword`` is not of ``string_match_type`` raise: NullArgument - ``keyword`` or ``string_match_type`` is ``null`` raise: Unsupported - ``supports_string_match_type(string_match_type)`` is ``false`` *compliance: mandatory -- This method must be implemented.* """ # Note: this currently ignores match argument match_value = self._get_string_match_value(keyword, string_match_type) for field_name in self._keyword_fields: if field_name not in self._keyword_terms: self._keyword_terms[field_name] = {'$in': list()} self._keyword_terms[field_name]['$in'].append(match_value)
def function[match_keyword, parameter[self, keyword, string_match_type, match]]: constant[Adds a keyword to match. Multiple keywords can be added to perform a boolean ``OR`` among them. A keyword may be applied to any of the elements defined in this object such as the display name, description or any method defined in an interface implemented by this object. arg: keyword (string): keyword to match arg: string_match_type (osid.type.Type): the string match type arg: match (boolean): ``true`` for a positive match, ``false`` for a negative match raise: InvalidArgument - ``keyword`` is not of ``string_match_type`` raise: NullArgument - ``keyword`` or ``string_match_type`` is ``null`` raise: Unsupported - ``supports_string_match_type(string_match_type)`` is ``false`` *compliance: mandatory -- This method must be implemented.* ] variable[match_value] assign[=] call[name[self]._get_string_match_value, parameter[name[keyword], name[string_match_type]]] for taget[name[field_name]] in starred[name[self]._keyword_fields] begin[:] if compare[name[field_name] <ast.NotIn object at 0x7da2590d7190> name[self]._keyword_terms] begin[:] call[name[self]._keyword_terms][name[field_name]] assign[=] dictionary[[<ast.Constant object at 0x7da1b0973010>], [<ast.Call object at 0x7da1b0973550>]] call[call[call[name[self]._keyword_terms][name[field_name]]][constant[$in]].append, parameter[name[match_value]]]
keyword[def] identifier[match_keyword] ( identifier[self] , identifier[keyword] , identifier[string_match_type] = identifier[DEFAULT_STRING_MATCH_TYPE] , identifier[match] = keyword[True] ): literal[string] identifier[match_value] = identifier[self] . identifier[_get_string_match_value] ( identifier[keyword] , identifier[string_match_type] ) keyword[for] identifier[field_name] keyword[in] identifier[self] . identifier[_keyword_fields] : keyword[if] identifier[field_name] keyword[not] keyword[in] identifier[self] . identifier[_keyword_terms] : identifier[self] . identifier[_keyword_terms] [ identifier[field_name] ]={ literal[string] : identifier[list] ()} identifier[self] . identifier[_keyword_terms] [ identifier[field_name] ][ literal[string] ]. identifier[append] ( identifier[match_value] )
def match_keyword(self, keyword, string_match_type=DEFAULT_STRING_MATCH_TYPE, match=True): """Adds a keyword to match. Multiple keywords can be added to perform a boolean ``OR`` among them. A keyword may be applied to any of the elements defined in this object such as the display name, description or any method defined in an interface implemented by this object. arg: keyword (string): keyword to match arg: string_match_type (osid.type.Type): the string match type arg: match (boolean): ``true`` for a positive match, ``false`` for a negative match raise: InvalidArgument - ``keyword`` is not of ``string_match_type`` raise: NullArgument - ``keyword`` or ``string_match_type`` is ``null`` raise: Unsupported - ``supports_string_match_type(string_match_type)`` is ``false`` *compliance: mandatory -- This method must be implemented.* """ # Note: this currently ignores match argument match_value = self._get_string_match_value(keyword, string_match_type) for field_name in self._keyword_fields: if field_name not in self._keyword_terms: self._keyword_terms[field_name] = {'$in': list()} # depends on [control=['if'], data=['field_name']] self._keyword_terms[field_name]['$in'].append(match_value) # depends on [control=['for'], data=['field_name']]
def create_from_fits(cls, fitsfile, norm_type='flux'): """Build a TSCube object from a fits file created by gttscube Parameters ---------- fitsfile : str Path to the tscube FITS file. norm_type : str String specifying the quantity used for the normalization """ tsmap = WcsNDMap.read(fitsfile) tab_e = Table.read(fitsfile, 'EBOUNDS') tab_s = Table.read(fitsfile, 'SCANDATA') tab_f = Table.read(fitsfile, 'FITDATA') tab_e = convert_sed_cols(tab_e) tab_s = convert_sed_cols(tab_s) tab_f = convert_sed_cols(tab_f) emin = np.array(tab_e['e_min']) emax = np.array(tab_e['e_max']) try: if str(tab_e['e_min'].unit) == 'keV': emin /= 1000. except: pass try: if str(tab_e['e_max'].unit) == 'keV': emax /= 1000. except: pass nebins = len(tab_e) npred = tab_e['ref_npred'] ndim = len(tsmap.data.shape) if ndim == 2: cube_shape = (tsmap.data.shape[0], tsmap.data.shape[1], nebins) elif ndim == 1: cube_shape = (tsmap.data.shape[0], nebins) else: raise RuntimeError("Counts map has dimension %i" % (ndim)) refSpec = ReferenceSpec.create_from_table(tab_e) nll_vals = -np.array(tab_s["dloglike_scan"]) norm_vals = np.array(tab_s["norm_scan"]) axis = MapAxis.from_edges(np.concatenate((emin, emax[-1:])), interp='log') geom_3d = tsmap.geom.to_cube([axis]) tscube = WcsNDMap(geom_3d, np.rollaxis(tab_s["ts"].reshape(cube_shape), 2, 0)) ncube = WcsNDMap(geom_3d, np.rollaxis(tab_s["norm"].reshape(cube_shape), 2, 0)) nmap = WcsNDMap(tsmap.geom, tab_f['fit_norm'].reshape(tsmap.data.shape)) ref_colname = 'ref_%s' % norm_type norm_vals *= tab_e[ref_colname][np.newaxis, :, np.newaxis] return cls(tsmap, nmap, tscube, ncube, norm_vals, nll_vals, refSpec, norm_type)
def function[create_from_fits, parameter[cls, fitsfile, norm_type]]: constant[Build a TSCube object from a fits file created by gttscube Parameters ---------- fitsfile : str Path to the tscube FITS file. norm_type : str String specifying the quantity used for the normalization ] variable[tsmap] assign[=] call[name[WcsNDMap].read, parameter[name[fitsfile]]] variable[tab_e] assign[=] call[name[Table].read, parameter[name[fitsfile], constant[EBOUNDS]]] variable[tab_s] assign[=] call[name[Table].read, parameter[name[fitsfile], constant[SCANDATA]]] variable[tab_f] assign[=] call[name[Table].read, parameter[name[fitsfile], constant[FITDATA]]] variable[tab_e] assign[=] call[name[convert_sed_cols], parameter[name[tab_e]]] variable[tab_s] assign[=] call[name[convert_sed_cols], parameter[name[tab_s]]] variable[tab_f] assign[=] call[name[convert_sed_cols], parameter[name[tab_f]]] variable[emin] assign[=] call[name[np].array, parameter[call[name[tab_e]][constant[e_min]]]] variable[emax] assign[=] call[name[np].array, parameter[call[name[tab_e]][constant[e_max]]]] <ast.Try object at 0x7da20c7c8970> <ast.Try object at 0x7da20c7c9990> variable[nebins] assign[=] call[name[len], parameter[name[tab_e]]] variable[npred] assign[=] call[name[tab_e]][constant[ref_npred]] variable[ndim] assign[=] call[name[len], parameter[name[tsmap].data.shape]] if compare[name[ndim] equal[==] constant[2]] begin[:] variable[cube_shape] assign[=] tuple[[<ast.Subscript object at 0x7da20c7cb1f0>, <ast.Subscript object at 0x7da20c7cac80>, <ast.Name object at 0x7da20c7ca110>]] variable[refSpec] assign[=] call[name[ReferenceSpec].create_from_table, parameter[name[tab_e]]] variable[nll_vals] assign[=] <ast.UnaryOp object at 0x7da20c7cbdc0> variable[norm_vals] assign[=] call[name[np].array, parameter[call[name[tab_s]][constant[norm_scan]]]] variable[axis] assign[=] call[name[MapAxis].from_edges, parameter[call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da20c7c98a0>, <ast.Subscript object at 
0x7da20c7c8b50>]]]]]] variable[geom_3d] assign[=] call[name[tsmap].geom.to_cube, parameter[list[[<ast.Name object at 0x7da20c7cb6a0>]]]] variable[tscube] assign[=] call[name[WcsNDMap], parameter[name[geom_3d], call[name[np].rollaxis, parameter[call[call[name[tab_s]][constant[ts]].reshape, parameter[name[cube_shape]]], constant[2], constant[0]]]]] variable[ncube] assign[=] call[name[WcsNDMap], parameter[name[geom_3d], call[name[np].rollaxis, parameter[call[call[name[tab_s]][constant[norm]].reshape, parameter[name[cube_shape]]], constant[2], constant[0]]]]] variable[nmap] assign[=] call[name[WcsNDMap], parameter[name[tsmap].geom, call[call[name[tab_f]][constant[fit_norm]].reshape, parameter[name[tsmap].data.shape]]]] variable[ref_colname] assign[=] binary_operation[constant[ref_%s] <ast.Mod object at 0x7da2590d6920> name[norm_type]] <ast.AugAssign object at 0x7da18dc05a20> return[call[name[cls], parameter[name[tsmap], name[nmap], name[tscube], name[ncube], name[norm_vals], name[nll_vals], name[refSpec], name[norm_type]]]]
keyword[def] identifier[create_from_fits] ( identifier[cls] , identifier[fitsfile] , identifier[norm_type] = literal[string] ): literal[string] identifier[tsmap] = identifier[WcsNDMap] . identifier[read] ( identifier[fitsfile] ) identifier[tab_e] = identifier[Table] . identifier[read] ( identifier[fitsfile] , literal[string] ) identifier[tab_s] = identifier[Table] . identifier[read] ( identifier[fitsfile] , literal[string] ) identifier[tab_f] = identifier[Table] . identifier[read] ( identifier[fitsfile] , literal[string] ) identifier[tab_e] = identifier[convert_sed_cols] ( identifier[tab_e] ) identifier[tab_s] = identifier[convert_sed_cols] ( identifier[tab_s] ) identifier[tab_f] = identifier[convert_sed_cols] ( identifier[tab_f] ) identifier[emin] = identifier[np] . identifier[array] ( identifier[tab_e] [ literal[string] ]) identifier[emax] = identifier[np] . identifier[array] ( identifier[tab_e] [ literal[string] ]) keyword[try] : keyword[if] identifier[str] ( identifier[tab_e] [ literal[string] ]. identifier[unit] )== literal[string] : identifier[emin] /= literal[int] keyword[except] : keyword[pass] keyword[try] : keyword[if] identifier[str] ( identifier[tab_e] [ literal[string] ]. identifier[unit] )== literal[string] : identifier[emax] /= literal[int] keyword[except] : keyword[pass] identifier[nebins] = identifier[len] ( identifier[tab_e] ) identifier[npred] = identifier[tab_e] [ literal[string] ] identifier[ndim] = identifier[len] ( identifier[tsmap] . identifier[data] . identifier[shape] ) keyword[if] identifier[ndim] == literal[int] : identifier[cube_shape] =( identifier[tsmap] . identifier[data] . identifier[shape] [ literal[int] ], identifier[tsmap] . identifier[data] . identifier[shape] [ literal[int] ], identifier[nebins] ) keyword[elif] identifier[ndim] == literal[int] : identifier[cube_shape] =( identifier[tsmap] . identifier[data] . 
identifier[shape] [ literal[int] ], identifier[nebins] ) keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[ndim] )) identifier[refSpec] = identifier[ReferenceSpec] . identifier[create_from_table] ( identifier[tab_e] ) identifier[nll_vals] =- identifier[np] . identifier[array] ( identifier[tab_s] [ literal[string] ]) identifier[norm_vals] = identifier[np] . identifier[array] ( identifier[tab_s] [ literal[string] ]) identifier[axis] = identifier[MapAxis] . identifier[from_edges] ( identifier[np] . identifier[concatenate] (( identifier[emin] , identifier[emax] [- literal[int] :])), identifier[interp] = literal[string] ) identifier[geom_3d] = identifier[tsmap] . identifier[geom] . identifier[to_cube] ([ identifier[axis] ]) identifier[tscube] = identifier[WcsNDMap] ( identifier[geom_3d] , identifier[np] . identifier[rollaxis] ( identifier[tab_s] [ literal[string] ]. identifier[reshape] ( identifier[cube_shape] ), literal[int] , literal[int] )) identifier[ncube] = identifier[WcsNDMap] ( identifier[geom_3d] , identifier[np] . identifier[rollaxis] ( identifier[tab_s] [ literal[string] ]. identifier[reshape] ( identifier[cube_shape] ), literal[int] , literal[int] )) identifier[nmap] = identifier[WcsNDMap] ( identifier[tsmap] . identifier[geom] , identifier[tab_f] [ literal[string] ]. identifier[reshape] ( identifier[tsmap] . identifier[data] . identifier[shape] )) identifier[ref_colname] = literal[string] % identifier[norm_type] identifier[norm_vals] *= identifier[tab_e] [ identifier[ref_colname] ][ identifier[np] . identifier[newaxis] ,:, identifier[np] . identifier[newaxis] ] keyword[return] identifier[cls] ( identifier[tsmap] , identifier[nmap] , identifier[tscube] , identifier[ncube] , identifier[norm_vals] , identifier[nll_vals] , identifier[refSpec] , identifier[norm_type] )
def create_from_fits(cls, fitsfile, norm_type='flux'): """Build a TSCube object from a fits file created by gttscube Parameters ---------- fitsfile : str Path to the tscube FITS file. norm_type : str String specifying the quantity used for the normalization """ tsmap = WcsNDMap.read(fitsfile) tab_e = Table.read(fitsfile, 'EBOUNDS') tab_s = Table.read(fitsfile, 'SCANDATA') tab_f = Table.read(fitsfile, 'FITDATA') tab_e = convert_sed_cols(tab_e) tab_s = convert_sed_cols(tab_s) tab_f = convert_sed_cols(tab_f) emin = np.array(tab_e['e_min']) emax = np.array(tab_e['e_max']) try: if str(tab_e['e_min'].unit) == 'keV': emin /= 1000.0 # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] try: if str(tab_e['e_max'].unit) == 'keV': emax /= 1000.0 # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] nebins = len(tab_e) npred = tab_e['ref_npred'] ndim = len(tsmap.data.shape) if ndim == 2: cube_shape = (tsmap.data.shape[0], tsmap.data.shape[1], nebins) # depends on [control=['if'], data=[]] elif ndim == 1: cube_shape = (tsmap.data.shape[0], nebins) # depends on [control=['if'], data=[]] else: raise RuntimeError('Counts map has dimension %i' % ndim) refSpec = ReferenceSpec.create_from_table(tab_e) nll_vals = -np.array(tab_s['dloglike_scan']) norm_vals = np.array(tab_s['norm_scan']) axis = MapAxis.from_edges(np.concatenate((emin, emax[-1:])), interp='log') geom_3d = tsmap.geom.to_cube([axis]) tscube = WcsNDMap(geom_3d, np.rollaxis(tab_s['ts'].reshape(cube_shape), 2, 0)) ncube = WcsNDMap(geom_3d, np.rollaxis(tab_s['norm'].reshape(cube_shape), 2, 0)) nmap = WcsNDMap(tsmap.geom, tab_f['fit_norm'].reshape(tsmap.data.shape)) ref_colname = 'ref_%s' % norm_type norm_vals *= tab_e[ref_colname][np.newaxis, :, np.newaxis] return cls(tsmap, nmap, tscube, ncube, norm_vals, nll_vals, refSpec, norm_type)
def luminosity_to_flux(lumErg_S, dist_Mpc): """ *Convert luminosity to a flux* **Key Arguments:** - ``lumErg_S`` -- luminosity in ergs/sec - ``dist_Mpc`` -- distance in Mpc **Return:** - ``fluxErg_cm2_S`` -- flux in ergs/cm2/s """ ################ > IMPORTS ################ ## STANDARD LIB ## ## THIRD PARTY ## import numpy as np import math ## LOCAL APPLICATION ## ################ > VARIABLE SETTINGS ###### ################ >ACTION(S) ################ # Convert the distance to cm distCm = dist_Mpc * MPC_2_CMS fluxErg_cm2_S = lumErg_S / (4 * np.pi * distCm ** 2) return fluxErg_cm2_S
def function[luminosity_to_flux, parameter[lumErg_S, dist_Mpc]]: constant[ *Convert luminosity to a flux* **Key Arguments:** - ``lumErg_S`` -- luminosity in ergs/sec - ``dist_Mpc`` -- distance in Mpc **Return:** - ``fluxErg_cm2_S`` -- flux in ergs/cm2/s ] import module[numpy] as alias[np] import module[math] variable[distCm] assign[=] binary_operation[name[dist_Mpc] * name[MPC_2_CMS]] variable[fluxErg_cm2_S] assign[=] binary_operation[name[lumErg_S] / binary_operation[binary_operation[constant[4] * name[np].pi] * binary_operation[name[distCm] ** constant[2]]]] return[name[fluxErg_cm2_S]]
keyword[def] identifier[luminosity_to_flux] ( identifier[lumErg_S] , identifier[dist_Mpc] ): literal[string] keyword[import] identifier[numpy] keyword[as] identifier[np] keyword[import] identifier[math] identifier[distCm] = identifier[dist_Mpc] * identifier[MPC_2_CMS] identifier[fluxErg_cm2_S] = identifier[lumErg_S] /( literal[int] * identifier[np] . identifier[pi] * identifier[distCm] ** literal[int] ) keyword[return] identifier[fluxErg_cm2_S]
def luminosity_to_flux(lumErg_S, dist_Mpc): """ *Convert luminosity to a flux* **Key Arguments:** - ``lumErg_S`` -- luminosity in ergs/sec - ``dist_Mpc`` -- distance in Mpc **Return:** - ``fluxErg_cm2_S`` -- flux in ergs/cm2/s """ ################ > IMPORTS ################ ## STANDARD LIB ## ## THIRD PARTY ## import numpy as np import math ## LOCAL APPLICATION ## ################ > VARIABLE SETTINGS ###### ################ >ACTION(S) ################ # Convert the distance to cm distCm = dist_Mpc * MPC_2_CMS fluxErg_cm2_S = lumErg_S / (4 * np.pi * distCm ** 2) return fluxErg_cm2_S
def status(backend): '''print the status for all or one of the backends. ''' print('[backend status]') settings = read_client_secrets() print('There are %s clients found in secrets.' %len(settings)) if 'SREGISTRY_CLIENT' in settings: print('active: %s' %settings['SREGISTRY_CLIENT']) update_secrets(settings) else: print('There is no active client.')
def function[status, parameter[backend]]: constant[print the status for all or one of the backends. ] call[name[print], parameter[constant[[backend status]]]] variable[settings] assign[=] call[name[read_client_secrets], parameter[]] call[name[print], parameter[binary_operation[constant[There are %s clients found in secrets.] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[settings]]]]]] if compare[constant[SREGISTRY_CLIENT] in name[settings]] begin[:] call[name[print], parameter[binary_operation[constant[active: %s] <ast.Mod object at 0x7da2590d6920> call[name[settings]][constant[SREGISTRY_CLIENT]]]]] call[name[update_secrets], parameter[name[settings]]]
keyword[def] identifier[status] ( identifier[backend] ): literal[string] identifier[print] ( literal[string] ) identifier[settings] = identifier[read_client_secrets] () identifier[print] ( literal[string] % identifier[len] ( identifier[settings] )) keyword[if] literal[string] keyword[in] identifier[settings] : identifier[print] ( literal[string] % identifier[settings] [ literal[string] ]) identifier[update_secrets] ( identifier[settings] ) keyword[else] : identifier[print] ( literal[string] )
def status(backend): """print the status for all or one of the backends. """ print('[backend status]') settings = read_client_secrets() print('There are %s clients found in secrets.' % len(settings)) if 'SREGISTRY_CLIENT' in settings: print('active: %s' % settings['SREGISTRY_CLIENT']) update_secrets(settings) # depends on [control=['if'], data=['settings']] else: print('There is no active client.')
def interpolate(values, value_times, sampling_rate=1000): """ 3rd order spline interpolation. Parameters ---------- values : dataframe Values. value_times : list Time indices of values. sampling_rate : int Sampling rate (samples/second). Returns ---------- signal : pd.Series An array containing the values indexed by time. Example ---------- >>> import neurokit as nk >>> signal = interpolate([800, 900, 700, 500], [1000, 2000, 3000, 4000], sampling_rate=1000) >>> pd.Series(signal).plot() Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - scipy - pandas """ # values=RRis.copy() # value_times=beats_times.copy() # Preprocessing initial_index = value_times[0] value_times = np.array(value_times) - initial_index # fit a 3rd degree spline on the data. spline = scipy.interpolate.splrep(x=value_times, y=values, k=3, s=0) # s=0 guarantees that it will pass through ALL the given points x = np.arange(0, value_times[-1], 1) # Get the values indexed per time signal = scipy.interpolate.splev(x=x, tck=spline, der=0) # Transform to series signal = pd.Series(signal) signal.index = np.array(np.arange(initial_index, initial_index+len(signal), 1)) return(signal)
def function[interpolate, parameter[values, value_times, sampling_rate]]: constant[ 3rd order spline interpolation. Parameters ---------- values : dataframe Values. value_times : list Time indices of values. sampling_rate : int Sampling rate (samples/second). Returns ---------- signal : pd.Series An array containing the values indexed by time. Example ---------- >>> import neurokit as nk >>> signal = interpolate([800, 900, 700, 500], [1000, 2000, 3000, 4000], sampling_rate=1000) >>> pd.Series(signal).plot() Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - scipy - pandas ] variable[initial_index] assign[=] call[name[value_times]][constant[0]] variable[value_times] assign[=] binary_operation[call[name[np].array, parameter[name[value_times]]] - name[initial_index]] variable[spline] assign[=] call[name[scipy].interpolate.splrep, parameter[]] variable[x] assign[=] call[name[np].arange, parameter[constant[0], call[name[value_times]][<ast.UnaryOp object at 0x7da2054a4640>], constant[1]]] variable[signal] assign[=] call[name[scipy].interpolate.splev, parameter[]] variable[signal] assign[=] call[name[pd].Series, parameter[name[signal]]] name[signal].index assign[=] call[name[np].array, parameter[call[name[np].arange, parameter[name[initial_index], binary_operation[name[initial_index] + call[name[len], parameter[name[signal]]]], constant[1]]]]] return[name[signal]]
keyword[def] identifier[interpolate] ( identifier[values] , identifier[value_times] , identifier[sampling_rate] = literal[int] ): literal[string] identifier[initial_index] = identifier[value_times] [ literal[int] ] identifier[value_times] = identifier[np] . identifier[array] ( identifier[value_times] )- identifier[initial_index] identifier[spline] = identifier[scipy] . identifier[interpolate] . identifier[splrep] ( identifier[x] = identifier[value_times] , identifier[y] = identifier[values] , identifier[k] = literal[int] , identifier[s] = literal[int] ) identifier[x] = identifier[np] . identifier[arange] ( literal[int] , identifier[value_times] [- literal[int] ], literal[int] ) identifier[signal] = identifier[scipy] . identifier[interpolate] . identifier[splev] ( identifier[x] = identifier[x] , identifier[tck] = identifier[spline] , identifier[der] = literal[int] ) identifier[signal] = identifier[pd] . identifier[Series] ( identifier[signal] ) identifier[signal] . identifier[index] = identifier[np] . identifier[array] ( identifier[np] . identifier[arange] ( identifier[initial_index] , identifier[initial_index] + identifier[len] ( identifier[signal] ), literal[int] )) keyword[return] ( identifier[signal] )
def interpolate(values, value_times, sampling_rate=1000): """ 3rd order spline interpolation. Parameters ---------- values : dataframe Values. value_times : list Time indices of values. sampling_rate : int Sampling rate (samples/second). Returns ---------- signal : pd.Series An array containing the values indexed by time. Example ---------- >>> import neurokit as nk >>> signal = interpolate([800, 900, 700, 500], [1000, 2000, 3000, 4000], sampling_rate=1000) >>> pd.Series(signal).plot() Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - scipy - pandas """ # values=RRis.copy() # value_times=beats_times.copy() # Preprocessing initial_index = value_times[0] value_times = np.array(value_times) - initial_index # fit a 3rd degree spline on the data. spline = scipy.interpolate.splrep(x=value_times, y=values, k=3, s=0) # s=0 guarantees that it will pass through ALL the given points x = np.arange(0, value_times[-1], 1) # Get the values indexed per time signal = scipy.interpolate.splev(x=x, tck=spline, der=0) # Transform to series signal = pd.Series(signal) signal.index = np.array(np.arange(initial_index, initial_index + len(signal), 1)) return signal
def blocks(self, *args, **kwargs): """ Interface to apply audiolazy.blocks directly in a stream, returning another stream. Use keyword args. """ return Stream(blocks(iter(self), *args, **kwargs))
def function[blocks, parameter[self]]: constant[ Interface to apply audiolazy.blocks directly in a stream, returning another stream. Use keyword args. ] return[call[name[Stream], parameter[call[name[blocks], parameter[call[name[iter], parameter[name[self]]], <ast.Starred object at 0x7da1b07fbf40>]]]]]
keyword[def] identifier[blocks] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[Stream] ( identifier[blocks] ( identifier[iter] ( identifier[self] ),* identifier[args] ,** identifier[kwargs] ))
def blocks(self, *args, **kwargs): """ Interface to apply audiolazy.blocks directly in a stream, returning another stream. Use keyword args. """ return Stream(blocks(iter(self), *args, **kwargs))
def __json_date_parse(json_object): """ Parse dates in certain known json fields, if possible. """ known_date_fields = ["created_at", "week", "day", "expires_at", "scheduled_at"] for k, v in json_object.items(): if k in known_date_fields: if v != None: try: if isinstance(v, int): json_object[k] = datetime.datetime.fromtimestamp(v, pytz.utc) else: json_object[k] = dateutil.parser.parse(v) except: raise MastodonAPIError('Encountered invalid date.') return json_object
def function[__json_date_parse, parameter[json_object]]: constant[ Parse dates in certain known json fields, if possible. ] variable[known_date_fields] assign[=] list[[<ast.Constant object at 0x7da20c6e4b80>, <ast.Constant object at 0x7da20c6e7640>, <ast.Constant object at 0x7da20c6e6f80>, <ast.Constant object at 0x7da20c6e4820>, <ast.Constant object at 0x7da20c6e6290>]] for taget[tuple[[<ast.Name object at 0x7da20c6e7d60>, <ast.Name object at 0x7da20c6e6aa0>]]] in starred[call[name[json_object].items, parameter[]]] begin[:] if compare[name[k] in name[known_date_fields]] begin[:] if compare[name[v] not_equal[!=] constant[None]] begin[:] <ast.Try object at 0x7da20c6e6860> return[name[json_object]]
keyword[def] identifier[__json_date_parse] ( identifier[json_object] ): literal[string] identifier[known_date_fields] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[json_object] . identifier[items] (): keyword[if] identifier[k] keyword[in] identifier[known_date_fields] : keyword[if] identifier[v] != keyword[None] : keyword[try] : keyword[if] identifier[isinstance] ( identifier[v] , identifier[int] ): identifier[json_object] [ identifier[k] ]= identifier[datetime] . identifier[datetime] . identifier[fromtimestamp] ( identifier[v] , identifier[pytz] . identifier[utc] ) keyword[else] : identifier[json_object] [ identifier[k] ]= identifier[dateutil] . identifier[parser] . identifier[parse] ( identifier[v] ) keyword[except] : keyword[raise] identifier[MastodonAPIError] ( literal[string] ) keyword[return] identifier[json_object]
def __json_date_parse(json_object): """ Parse dates in certain known json fields, if possible. """ known_date_fields = ['created_at', 'week', 'day', 'expires_at', 'scheduled_at'] for (k, v) in json_object.items(): if k in known_date_fields: if v != None: try: if isinstance(v, int): json_object[k] = datetime.datetime.fromtimestamp(v, pytz.utc) # depends on [control=['if'], data=[]] else: json_object[k] = dateutil.parser.parse(v) # depends on [control=['try'], data=[]] except: raise MastodonAPIError('Encountered invalid date.') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['v']] # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=[]] return json_object
def annotations_func(func): """Works like annotations, but is only applicable to functions, methods and properties. """ if not has_type_hints(func): # What about defaults? func.__annotations__ = {} func.__annotations__ = _get_type_hints(func, infer_defaults = False) return func
def function[annotations_func, parameter[func]]: constant[Works like annotations, but is only applicable to functions, methods and properties. ] if <ast.UnaryOp object at 0x7da1b0d48520> begin[:] name[func].__annotations__ assign[=] dictionary[[], []] name[func].__annotations__ assign[=] call[name[_get_type_hints], parameter[name[func]]] return[name[func]]
keyword[def] identifier[annotations_func] ( identifier[func] ): literal[string] keyword[if] keyword[not] identifier[has_type_hints] ( identifier[func] ): identifier[func] . identifier[__annotations__] ={} identifier[func] . identifier[__annotations__] = identifier[_get_type_hints] ( identifier[func] , identifier[infer_defaults] = keyword[False] ) keyword[return] identifier[func]
def annotations_func(func): """Works like annotations, but is only applicable to functions, methods and properties. """ if not has_type_hints(func): # What about defaults? func.__annotations__ = {} # depends on [control=['if'], data=[]] func.__annotations__ = _get_type_hints(func, infer_defaults=False) return func
def decrypt(self, ciphertext, nonce=None, encoder=encoding.RawEncoder): """ Decrypts the ciphertext using the `nonce` (explicitly, when passed as a parameter or implicitly, when omitted, as part of the ciphertext) and returns the plaintext message. :param ciphertext: [:class:`bytes`] The encrypted message to decrypt :param nonce: [:class:`bytes`] The nonce used when encrypting the ciphertext :param encoder: The encoder used to decode the ciphertext. :rtype: [:class:`bytes`] """ # Decode our ciphertext ciphertext = encoder.decode(ciphertext) if nonce is None: # If we were given the nonce and ciphertext combined, split them. nonce = ciphertext[:self.NONCE_SIZE] ciphertext = ciphertext[self.NONCE_SIZE:] if len(nonce) != self.NONCE_SIZE: raise exc.ValueError( "The nonce must be exactly %s bytes long" % self.NONCE_SIZE, ) plaintext = nacl.bindings.crypto_secretbox_open(ciphertext, nonce, self._key) return plaintext
def function[decrypt, parameter[self, ciphertext, nonce, encoder]]: constant[ Decrypts the ciphertext using the `nonce` (explicitly, when passed as a parameter or implicitly, when omitted, as part of the ciphertext) and returns the plaintext message. :param ciphertext: [:class:`bytes`] The encrypted message to decrypt :param nonce: [:class:`bytes`] The nonce used when encrypting the ciphertext :param encoder: The encoder used to decode the ciphertext. :rtype: [:class:`bytes`] ] variable[ciphertext] assign[=] call[name[encoder].decode, parameter[name[ciphertext]]] if compare[name[nonce] is constant[None]] begin[:] variable[nonce] assign[=] call[name[ciphertext]][<ast.Slice object at 0x7da2044c1120>] variable[ciphertext] assign[=] call[name[ciphertext]][<ast.Slice object at 0x7da2044c2380>] if compare[call[name[len], parameter[name[nonce]]] not_equal[!=] name[self].NONCE_SIZE] begin[:] <ast.Raise object at 0x7da2044c3d00> variable[plaintext] assign[=] call[name[nacl].bindings.crypto_secretbox_open, parameter[name[ciphertext], name[nonce], name[self]._key]] return[name[plaintext]]
keyword[def] identifier[decrypt] ( identifier[self] , identifier[ciphertext] , identifier[nonce] = keyword[None] , identifier[encoder] = identifier[encoding] . identifier[RawEncoder] ): literal[string] identifier[ciphertext] = identifier[encoder] . identifier[decode] ( identifier[ciphertext] ) keyword[if] identifier[nonce] keyword[is] keyword[None] : identifier[nonce] = identifier[ciphertext] [: identifier[self] . identifier[NONCE_SIZE] ] identifier[ciphertext] = identifier[ciphertext] [ identifier[self] . identifier[NONCE_SIZE] :] keyword[if] identifier[len] ( identifier[nonce] )!= identifier[self] . identifier[NONCE_SIZE] : keyword[raise] identifier[exc] . identifier[ValueError] ( literal[string] % identifier[self] . identifier[NONCE_SIZE] , ) identifier[plaintext] = identifier[nacl] . identifier[bindings] . identifier[crypto_secretbox_open] ( identifier[ciphertext] , identifier[nonce] , identifier[self] . identifier[_key] ) keyword[return] identifier[plaintext]
def decrypt(self, ciphertext, nonce=None, encoder=encoding.RawEncoder): """ Decrypts the ciphertext using the `nonce` (explicitly, when passed as a parameter or implicitly, when omitted, as part of the ciphertext) and returns the plaintext message. :param ciphertext: [:class:`bytes`] The encrypted message to decrypt :param nonce: [:class:`bytes`] The nonce used when encrypting the ciphertext :param encoder: The encoder used to decode the ciphertext. :rtype: [:class:`bytes`] """ # Decode our ciphertext ciphertext = encoder.decode(ciphertext) if nonce is None: # If we were given the nonce and ciphertext combined, split them. nonce = ciphertext[:self.NONCE_SIZE] ciphertext = ciphertext[self.NONCE_SIZE:] # depends on [control=['if'], data=['nonce']] if len(nonce) != self.NONCE_SIZE: raise exc.ValueError('The nonce must be exactly %s bytes long' % self.NONCE_SIZE) # depends on [control=['if'], data=[]] plaintext = nacl.bindings.crypto_secretbox_open(ciphertext, nonce, self._key) return plaintext
def _args_from_dict(ddata: Mapping[str, Any]): """Allows to construct an instance of AnnData from a dictionary. Acts as interface for the communication with the hdf5 file. In particular, from a dict that has been written using ``AnnData._to_dict_fixed_width_arrays``. """ d_true_keys = {} # backwards compat uns_is_not_key = False valid_keys = [] for keys in AnnData._H5_ALIASES.values(): valid_keys += keys valid_keys += ['raw.X', 'raw.var', 'raw.varm', 'raw.cat'] for key in ddata.keys(): # if there is another key then the prdedefined # then we are reading the old format if key not in valid_keys: uns_is_not_key = True for true_key, keys in AnnData._H5_ALIASES.items(): for key in keys: if key in ddata: d_true_keys[true_key] = ddata[key] if uns_is_not_key: del ddata[key] break else: d_true_keys[true_key] = None # transform recarray to dataframe for true_key, keys in AnnData._H5_ALIASES_NAMES.items(): if d_true_keys[true_key] is not None: for key in keys: if key in d_true_keys[true_key].dtype.names: d_true_keys[true_key] = pd.DataFrame.from_records( d_true_keys[true_key], index=key) break d_true_keys[true_key].index = d_true_keys[true_key].index.astype('U') # transform to unicode string # TODO: this is quite a hack for c in d_true_keys[true_key].columns: if is_string_dtype(d_true_keys[true_key][c]): d_true_keys[true_key][c] = pd.Index( d_true_keys[true_key][c]).astype('U').values # these are the category fields k_to_delete = [] items = ( ddata.items() if uns_is_not_key else ddata['uns'].items() if 'uns' in ddata else [] ) for k, v in items: if k.endswith('_categories'): k_stripped = k.replace('_categories', '') if isinstance(v, (str, int)): # fix categories with a single category v = [v] for ann in ['obs', 'var']: if k_stripped in d_true_keys[ann]: d_true_keys[ann][k_stripped] = pd.Categorical.from_codes( codes=d_true_keys[ann][k_stripped].values, categories=v, ) k_to_delete.append(k) for k in k_to_delete: if uns_is_not_key: del ddata[k] else: del ddata['uns'][k] # 
assign the variables X = d_true_keys['X'] obs = d_true_keys['obs'] obsm = d_true_keys['obsm'] var = d_true_keys['var'] varm = d_true_keys['varm'] layers = d_true_keys['layers'] raw = None if 'raw.X' in ddata: raw = {} raw['X'] = ddata['raw.X'] del ddata['raw.X'] # get the dataframe raw['var'] = pd.DataFrame.from_records( ddata['raw.var'], index='index') del ddata['raw.var'] raw['var'].index = raw['var'].index.astype('U') # transform to unicode string for c in raw['var'].columns: if is_string_dtype(raw['var'][c]): raw['var'][c] = pd.Index(raw['var'][c]).astype('U').values # these are the category fields if 'raw.cat' in ddata: # old h5ad didn't have that field for k, v in ddata['raw.cat'].items(): if k.endswith('_categories'): k_stripped = k.replace('_categories', '') if isinstance(v, (str, int)): # fix categories with a single category v = [v] raw['var'][k_stripped] = pd.Categorical.from_codes( codes=raw['var'][k_stripped].values, categories=v) del ddata['raw.cat'] if 'raw.varm' in ddata: raw['varm'] = ddata['raw.varm'] del ddata['raw.varm'] elif raw is not None: raw['varm'] = None # the remaining fields are the unstructured annotation uns = ( ddata if uns_is_not_key else ddata['uns'] if 'uns' in ddata else {} ) return X, obs, var, uns, obsm, varm, layers, raw
def function[_args_from_dict, parameter[ddata]]: constant[Allows to construct an instance of AnnData from a dictionary. Acts as interface for the communication with the hdf5 file. In particular, from a dict that has been written using ``AnnData._to_dict_fixed_width_arrays``. ] variable[d_true_keys] assign[=] dictionary[[], []] variable[uns_is_not_key] assign[=] constant[False] variable[valid_keys] assign[=] list[[]] for taget[name[keys]] in starred[call[name[AnnData]._H5_ALIASES.values, parameter[]]] begin[:] <ast.AugAssign object at 0x7da1b20b40d0> <ast.AugAssign object at 0x7da1b20b64a0> for taget[name[key]] in starred[call[name[ddata].keys, parameter[]]] begin[:] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[valid_keys]] begin[:] variable[uns_is_not_key] assign[=] constant[True] for taget[tuple[[<ast.Name object at 0x7da1b20b7730>, <ast.Name object at 0x7da1b20b7070>]]] in starred[call[name[AnnData]._H5_ALIASES.items, parameter[]]] begin[:] for taget[name[key]] in starred[name[keys]] begin[:] if compare[name[key] in name[ddata]] begin[:] call[name[d_true_keys]][name[true_key]] assign[=] call[name[ddata]][name[key]] if name[uns_is_not_key] begin[:] <ast.Delete object at 0x7da1b20b6a10> break for taget[tuple[[<ast.Name object at 0x7da1b20b4040>, <ast.Name object at 0x7da1b20b5e40>]]] in starred[call[name[AnnData]._H5_ALIASES_NAMES.items, parameter[]]] begin[:] if compare[call[name[d_true_keys]][name[true_key]] is_not constant[None]] begin[:] for taget[name[key]] in starred[name[keys]] begin[:] if compare[name[key] in call[name[d_true_keys]][name[true_key]].dtype.names] begin[:] call[name[d_true_keys]][name[true_key]] assign[=] call[name[pd].DataFrame.from_records, parameter[call[name[d_true_keys]][name[true_key]]]] break call[name[d_true_keys]][name[true_key]].index assign[=] call[call[name[d_true_keys]][name[true_key]].index.astype, parameter[constant[U]]] for taget[name[c]] in starred[call[name[d_true_keys]][name[true_key]].columns] begin[:] if 
call[name[is_string_dtype], parameter[call[call[name[d_true_keys]][name[true_key]]][name[c]]]] begin[:] call[call[name[d_true_keys]][name[true_key]]][name[c]] assign[=] call[call[name[pd].Index, parameter[call[call[name[d_true_keys]][name[true_key]]][name[c]]]].astype, parameter[constant[U]]].values variable[k_to_delete] assign[=] list[[]] variable[items] assign[=] <ast.IfExp object at 0x7da20c992920> for taget[tuple[[<ast.Name object at 0x7da20c990af0>, <ast.Name object at 0x7da20c991990>]]] in starred[name[items]] begin[:] if call[name[k].endswith, parameter[constant[_categories]]] begin[:] variable[k_stripped] assign[=] call[name[k].replace, parameter[constant[_categories], constant[]]] if call[name[isinstance], parameter[name[v], tuple[[<ast.Name object at 0x7da20c9922c0>, <ast.Name object at 0x7da20c993ac0>]]]] begin[:] variable[v] assign[=] list[[<ast.Name object at 0x7da20c993340>]] for taget[name[ann]] in starred[list[[<ast.Constant object at 0x7da20c991f90>, <ast.Constant object at 0x7da20c991cf0>]]] begin[:] if compare[name[k_stripped] in call[name[d_true_keys]][name[ann]]] begin[:] call[call[name[d_true_keys]][name[ann]]][name[k_stripped]] assign[=] call[name[pd].Categorical.from_codes, parameter[]] call[name[k_to_delete].append, parameter[name[k]]] for taget[name[k]] in starred[name[k_to_delete]] begin[:] if name[uns_is_not_key] begin[:] <ast.Delete object at 0x7da20c993dc0> variable[X] assign[=] call[name[d_true_keys]][constant[X]] variable[obs] assign[=] call[name[d_true_keys]][constant[obs]] variable[obsm] assign[=] call[name[d_true_keys]][constant[obsm]] variable[var] assign[=] call[name[d_true_keys]][constant[var]] variable[varm] assign[=] call[name[d_true_keys]][constant[varm]] variable[layers] assign[=] call[name[d_true_keys]][constant[layers]] variable[raw] assign[=] constant[None] if compare[constant[raw.X] in name[ddata]] begin[:] variable[raw] assign[=] dictionary[[], []] call[name[raw]][constant[X]] assign[=] 
call[name[ddata]][constant[raw.X]] <ast.Delete object at 0x7da20c992a40> call[name[raw]][constant[var]] assign[=] call[name[pd].DataFrame.from_records, parameter[call[name[ddata]][constant[raw.var]]]] <ast.Delete object at 0x7da20c992fe0> call[name[raw]][constant[var]].index assign[=] call[call[name[raw]][constant[var]].index.astype, parameter[constant[U]]] for taget[name[c]] in starred[call[name[raw]][constant[var]].columns] begin[:] if call[name[is_string_dtype], parameter[call[call[name[raw]][constant[var]]][name[c]]]] begin[:] call[call[name[raw]][constant[var]]][name[c]] assign[=] call[call[name[pd].Index, parameter[call[call[name[raw]][constant[var]]][name[c]]]].astype, parameter[constant[U]]].values if compare[constant[raw.cat] in name[ddata]] begin[:] for taget[tuple[[<ast.Name object at 0x7da20c993f40>, <ast.Name object at 0x7da20c990070>]]] in starred[call[call[name[ddata]][constant[raw.cat]].items, parameter[]]] begin[:] if call[name[k].endswith, parameter[constant[_categories]]] begin[:] variable[k_stripped] assign[=] call[name[k].replace, parameter[constant[_categories], constant[]]] if call[name[isinstance], parameter[name[v], tuple[[<ast.Name object at 0x7da20c993b20>, <ast.Name object at 0x7da20c993bb0>]]]] begin[:] variable[v] assign[=] list[[<ast.Name object at 0x7da20c992590>]] call[call[name[raw]][constant[var]]][name[k_stripped]] assign[=] call[name[pd].Categorical.from_codes, parameter[]] <ast.Delete object at 0x7da20c991420> if compare[constant[raw.varm] in name[ddata]] begin[:] call[name[raw]][constant[varm]] assign[=] call[name[ddata]][constant[raw.varm]] <ast.Delete object at 0x7da20c9912a0> variable[uns] assign[=] <ast.IfExp object at 0x7da20c992020> return[tuple[[<ast.Name object at 0x7da20c9920b0>, <ast.Name object at 0x7da20c991e40>, <ast.Name object at 0x7da20c9911e0>, <ast.Name object at 0x7da20c9921a0>, <ast.Name object at 0x7da20c993940>, <ast.Name object at 0x7da20c993100>, <ast.Name object at 0x7da20c992da0>, <ast.Name object at 
0x7da20c993a00>]]]
keyword[def] identifier[_args_from_dict] ( identifier[ddata] : identifier[Mapping] [ identifier[str] , identifier[Any] ]): literal[string] identifier[d_true_keys] ={} identifier[uns_is_not_key] = keyword[False] identifier[valid_keys] =[] keyword[for] identifier[keys] keyword[in] identifier[AnnData] . identifier[_H5_ALIASES] . identifier[values] (): identifier[valid_keys] += identifier[keys] identifier[valid_keys] +=[ literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[key] keyword[in] identifier[ddata] . identifier[keys] (): keyword[if] identifier[key] keyword[not] keyword[in] identifier[valid_keys] : identifier[uns_is_not_key] = keyword[True] keyword[for] identifier[true_key] , identifier[keys] keyword[in] identifier[AnnData] . identifier[_H5_ALIASES] . identifier[items] (): keyword[for] identifier[key] keyword[in] identifier[keys] : keyword[if] identifier[key] keyword[in] identifier[ddata] : identifier[d_true_keys] [ identifier[true_key] ]= identifier[ddata] [ identifier[key] ] keyword[if] identifier[uns_is_not_key] : keyword[del] identifier[ddata] [ identifier[key] ] keyword[break] keyword[else] : identifier[d_true_keys] [ identifier[true_key] ]= keyword[None] keyword[for] identifier[true_key] , identifier[keys] keyword[in] identifier[AnnData] . identifier[_H5_ALIASES_NAMES] . identifier[items] (): keyword[if] identifier[d_true_keys] [ identifier[true_key] ] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[key] keyword[in] identifier[keys] : keyword[if] identifier[key] keyword[in] identifier[d_true_keys] [ identifier[true_key] ]. identifier[dtype] . identifier[names] : identifier[d_true_keys] [ identifier[true_key] ]= identifier[pd] . identifier[DataFrame] . identifier[from_records] ( identifier[d_true_keys] [ identifier[true_key] ], identifier[index] = identifier[key] ) keyword[break] identifier[d_true_keys] [ identifier[true_key] ]. identifier[index] = identifier[d_true_keys] [ identifier[true_key] ]. 
identifier[index] . identifier[astype] ( literal[string] ) keyword[for] identifier[c] keyword[in] identifier[d_true_keys] [ identifier[true_key] ]. identifier[columns] : keyword[if] identifier[is_string_dtype] ( identifier[d_true_keys] [ identifier[true_key] ][ identifier[c] ]): identifier[d_true_keys] [ identifier[true_key] ][ identifier[c] ]= identifier[pd] . identifier[Index] ( identifier[d_true_keys] [ identifier[true_key] ][ identifier[c] ]). identifier[astype] ( literal[string] ). identifier[values] identifier[k_to_delete] =[] identifier[items] =( identifier[ddata] . identifier[items] () keyword[if] identifier[uns_is_not_key] keyword[else] identifier[ddata] [ literal[string] ]. identifier[items] () keyword[if] literal[string] keyword[in] identifier[ddata] keyword[else] [] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[items] : keyword[if] identifier[k] . identifier[endswith] ( literal[string] ): identifier[k_stripped] = identifier[k] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[isinstance] ( identifier[v] ,( identifier[str] , identifier[int] )): identifier[v] =[ identifier[v] ] keyword[for] identifier[ann] keyword[in] [ literal[string] , literal[string] ]: keyword[if] identifier[k_stripped] keyword[in] identifier[d_true_keys] [ identifier[ann] ]: identifier[d_true_keys] [ identifier[ann] ][ identifier[k_stripped] ]= identifier[pd] . identifier[Categorical] . identifier[from_codes] ( identifier[codes] = identifier[d_true_keys] [ identifier[ann] ][ identifier[k_stripped] ]. identifier[values] , identifier[categories] = identifier[v] , ) identifier[k_to_delete] . 
identifier[append] ( identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[k_to_delete] : keyword[if] identifier[uns_is_not_key] : keyword[del] identifier[ddata] [ identifier[k] ] keyword[else] : keyword[del] identifier[ddata] [ literal[string] ][ identifier[k] ] identifier[X] = identifier[d_true_keys] [ literal[string] ] identifier[obs] = identifier[d_true_keys] [ literal[string] ] identifier[obsm] = identifier[d_true_keys] [ literal[string] ] identifier[var] = identifier[d_true_keys] [ literal[string] ] identifier[varm] = identifier[d_true_keys] [ literal[string] ] identifier[layers] = identifier[d_true_keys] [ literal[string] ] identifier[raw] = keyword[None] keyword[if] literal[string] keyword[in] identifier[ddata] : identifier[raw] ={} identifier[raw] [ literal[string] ]= identifier[ddata] [ literal[string] ] keyword[del] identifier[ddata] [ literal[string] ] identifier[raw] [ literal[string] ]= identifier[pd] . identifier[DataFrame] . identifier[from_records] ( identifier[ddata] [ literal[string] ], identifier[index] = literal[string] ) keyword[del] identifier[ddata] [ literal[string] ] identifier[raw] [ literal[string] ]. identifier[index] = identifier[raw] [ literal[string] ]. identifier[index] . identifier[astype] ( literal[string] ) keyword[for] identifier[c] keyword[in] identifier[raw] [ literal[string] ]. identifier[columns] : keyword[if] identifier[is_string_dtype] ( identifier[raw] [ literal[string] ][ identifier[c] ]): identifier[raw] [ literal[string] ][ identifier[c] ]= identifier[pd] . identifier[Index] ( identifier[raw] [ literal[string] ][ identifier[c] ]). identifier[astype] ( literal[string] ). identifier[values] keyword[if] literal[string] keyword[in] identifier[ddata] : keyword[for] identifier[k] , identifier[v] keyword[in] identifier[ddata] [ literal[string] ]. identifier[items] (): keyword[if] identifier[k] . identifier[endswith] ( literal[string] ): identifier[k_stripped] = identifier[k] . 
identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[isinstance] ( identifier[v] ,( identifier[str] , identifier[int] )): identifier[v] =[ identifier[v] ] identifier[raw] [ literal[string] ][ identifier[k_stripped] ]= identifier[pd] . identifier[Categorical] . identifier[from_codes] ( identifier[codes] = identifier[raw] [ literal[string] ][ identifier[k_stripped] ]. identifier[values] , identifier[categories] = identifier[v] ) keyword[del] identifier[ddata] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[ddata] : identifier[raw] [ literal[string] ]= identifier[ddata] [ literal[string] ] keyword[del] identifier[ddata] [ literal[string] ] keyword[elif] identifier[raw] keyword[is] keyword[not] keyword[None] : identifier[raw] [ literal[string] ]= keyword[None] identifier[uns] =( identifier[ddata] keyword[if] identifier[uns_is_not_key] keyword[else] identifier[ddata] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[ddata] keyword[else] {} ) keyword[return] identifier[X] , identifier[obs] , identifier[var] , identifier[uns] , identifier[obsm] , identifier[varm] , identifier[layers] , identifier[raw]
def _args_from_dict(ddata: Mapping[str, Any]): """Allows to construct an instance of AnnData from a dictionary. Acts as interface for the communication with the hdf5 file. In particular, from a dict that has been written using ``AnnData._to_dict_fixed_width_arrays``. """ d_true_keys = {} # backwards compat uns_is_not_key = False valid_keys = [] for keys in AnnData._H5_ALIASES.values(): valid_keys += keys # depends on [control=['for'], data=['keys']] valid_keys += ['raw.X', 'raw.var', 'raw.varm', 'raw.cat'] for key in ddata.keys(): # if there is another key then the prdedefined # then we are reading the old format if key not in valid_keys: uns_is_not_key = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] for (true_key, keys) in AnnData._H5_ALIASES.items(): for key in keys: if key in ddata: d_true_keys[true_key] = ddata[key] if uns_is_not_key: del ddata[key] # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=['key', 'ddata']] # depends on [control=['for'], data=['key']] else: d_true_keys[true_key] = None # depends on [control=['for'], data=[]] # transform recarray to dataframe for (true_key, keys) in AnnData._H5_ALIASES_NAMES.items(): if d_true_keys[true_key] is not None: for key in keys: if key in d_true_keys[true_key].dtype.names: d_true_keys[true_key] = pd.DataFrame.from_records(d_true_keys[true_key], index=key) break # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] d_true_keys[true_key].index = d_true_keys[true_key].index.astype('U') # transform to unicode string # TODO: this is quite a hack for c in d_true_keys[true_key].columns: if is_string_dtype(d_true_keys[true_key][c]): d_true_keys[true_key][c] = pd.Index(d_true_keys[true_key][c]).astype('U').values # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # these are the category fields k_to_delete 
= [] items = ddata.items() if uns_is_not_key else ddata['uns'].items() if 'uns' in ddata else [] for (k, v) in items: if k.endswith('_categories'): k_stripped = k.replace('_categories', '') if isinstance(v, (str, int)): # fix categories with a single category v = [v] # depends on [control=['if'], data=[]] for ann in ['obs', 'var']: if k_stripped in d_true_keys[ann]: d_true_keys[ann][k_stripped] = pd.Categorical.from_codes(codes=d_true_keys[ann][k_stripped].values, categories=v) # depends on [control=['if'], data=['k_stripped']] # depends on [control=['for'], data=['ann']] k_to_delete.append(k) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] for k in k_to_delete: if uns_is_not_key: del ddata[k] # depends on [control=['if'], data=[]] else: del ddata['uns'][k] # depends on [control=['for'], data=['k']] # assign the variables X = d_true_keys['X'] obs = d_true_keys['obs'] obsm = d_true_keys['obsm'] var = d_true_keys['var'] varm = d_true_keys['varm'] layers = d_true_keys['layers'] raw = None if 'raw.X' in ddata: raw = {} raw['X'] = ddata['raw.X'] del ddata['raw.X'] # get the dataframe raw['var'] = pd.DataFrame.from_records(ddata['raw.var'], index='index') del ddata['raw.var'] raw['var'].index = raw['var'].index.astype('U') # transform to unicode string for c in raw['var'].columns: if is_string_dtype(raw['var'][c]): raw['var'][c] = pd.Index(raw['var'][c]).astype('U').values # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # these are the category fields if 'raw.cat' in ddata: # old h5ad didn't have that field for (k, v) in ddata['raw.cat'].items(): if k.endswith('_categories'): k_stripped = k.replace('_categories', '') if isinstance(v, (str, int)): # fix categories with a single category v = [v] # depends on [control=['if'], data=[]] raw['var'][k_stripped] = pd.Categorical.from_codes(codes=raw['var'][k_stripped].values, categories=v) # depends on [control=['if'], data=[]] # depends on [control=['for'], 
data=[]] del ddata['raw.cat'] # depends on [control=['if'], data=['ddata']] # depends on [control=['if'], data=['ddata']] if 'raw.varm' in ddata: raw['varm'] = ddata['raw.varm'] del ddata['raw.varm'] # depends on [control=['if'], data=['ddata']] elif raw is not None: raw['varm'] = None # depends on [control=['if'], data=['raw']] # the remaining fields are the unstructured annotation uns = ddata if uns_is_not_key else ddata['uns'] if 'uns' in ddata else {} return (X, obs, var, uns, obsm, varm, layers, raw)
def parse(self, rrstr): # type: (bytes) -> None ''' Parse a Rock Ridge Continuation Entry record out of a string. Parameters: rrstr - The string to parse the record out of. Returns: Nothing. ''' if self._initialized: raise pycdlibexception.PyCdlibInternalError('CE record already initialized!') (su_len, su_entry_version_unused, bl_cont_area_le, bl_cont_area_be, offset_cont_area_le, offset_cont_area_be, len_cont_area_le, len_cont_area_be) = struct.unpack_from('=BBLLLLLL', rrstr[:28], 2) # We assume that the caller has already checked the su_entry_version, # so we don't bother. if su_len != RRCERecord.length(): raise pycdlibexception.PyCdlibInvalidISO('Invalid length on rock ridge extension') if bl_cont_area_le != utils.swab_32bit(bl_cont_area_be): raise pycdlibexception.PyCdlibInvalidISO('CE record big and little endian continuation area do not agree') if offset_cont_area_le != utils.swab_32bit(offset_cont_area_be): raise pycdlibexception.PyCdlibInvalidISO('CE record big and little endian continuation area offset do not agree') if len_cont_area_le != utils.swab_32bit(len_cont_area_be): raise pycdlibexception.PyCdlibInvalidISO('CE record big and little endian continuation area length do not agree') self.bl_cont_area = bl_cont_area_le self.offset_cont_area = offset_cont_area_le self.len_cont_area = len_cont_area_le self._initialized = True
def function[parse, parameter[self, rrstr]]: constant[ Parse a Rock Ridge Continuation Entry record out of a string. Parameters: rrstr - The string to parse the record out of. Returns: Nothing. ] if name[self]._initialized begin[:] <ast.Raise object at 0x7da18f720550> <ast.Tuple object at 0x7da18f720970> assign[=] call[name[struct].unpack_from, parameter[constant[=BBLLLLLL], call[name[rrstr]][<ast.Slice object at 0x7da18f721ff0>], constant[2]]] if compare[name[su_len] not_equal[!=] call[name[RRCERecord].length, parameter[]]] begin[:] <ast.Raise object at 0x7da18f722260> if compare[name[bl_cont_area_le] not_equal[!=] call[name[utils].swab_32bit, parameter[name[bl_cont_area_be]]]] begin[:] <ast.Raise object at 0x7da18f721f90> if compare[name[offset_cont_area_le] not_equal[!=] call[name[utils].swab_32bit, parameter[name[offset_cont_area_be]]]] begin[:] <ast.Raise object at 0x7da18f721b70> if compare[name[len_cont_area_le] not_equal[!=] call[name[utils].swab_32bit, parameter[name[len_cont_area_be]]]] begin[:] <ast.Raise object at 0x7da18f7230d0> name[self].bl_cont_area assign[=] name[bl_cont_area_le] name[self].offset_cont_area assign[=] name[offset_cont_area_le] name[self].len_cont_area assign[=] name[len_cont_area_le] name[self]._initialized assign[=] constant[True]
keyword[def] identifier[parse] ( identifier[self] , identifier[rrstr] ): literal[string] keyword[if] identifier[self] . identifier[_initialized] : keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] ) ( identifier[su_len] , identifier[su_entry_version_unused] , identifier[bl_cont_area_le] , identifier[bl_cont_area_be] , identifier[offset_cont_area_le] , identifier[offset_cont_area_be] , identifier[len_cont_area_le] , identifier[len_cont_area_be] )= identifier[struct] . identifier[unpack_from] ( literal[string] , identifier[rrstr] [: literal[int] ], literal[int] ) keyword[if] identifier[su_len] != identifier[RRCERecord] . identifier[length] (): keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidISO] ( literal[string] ) keyword[if] identifier[bl_cont_area_le] != identifier[utils] . identifier[swab_32bit] ( identifier[bl_cont_area_be] ): keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidISO] ( literal[string] ) keyword[if] identifier[offset_cont_area_le] != identifier[utils] . identifier[swab_32bit] ( identifier[offset_cont_area_be] ): keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidISO] ( literal[string] ) keyword[if] identifier[len_cont_area_le] != identifier[utils] . identifier[swab_32bit] ( identifier[len_cont_area_be] ): keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidISO] ( literal[string] ) identifier[self] . identifier[bl_cont_area] = identifier[bl_cont_area_le] identifier[self] . identifier[offset_cont_area] = identifier[offset_cont_area_le] identifier[self] . identifier[len_cont_area] = identifier[len_cont_area_le] identifier[self] . identifier[_initialized] = keyword[True]
def parse(self, rrstr): # type: (bytes) -> None '\n Parse a Rock Ridge Continuation Entry record out of a string.\n\n Parameters:\n rrstr - The string to parse the record out of.\n Returns:\n Nothing.\n ' if self._initialized: raise pycdlibexception.PyCdlibInternalError('CE record already initialized!') # depends on [control=['if'], data=[]] (su_len, su_entry_version_unused, bl_cont_area_le, bl_cont_area_be, offset_cont_area_le, offset_cont_area_be, len_cont_area_le, len_cont_area_be) = struct.unpack_from('=BBLLLLLL', rrstr[:28], 2) # We assume that the caller has already checked the su_entry_version, # so we don't bother. if su_len != RRCERecord.length(): raise pycdlibexception.PyCdlibInvalidISO('Invalid length on rock ridge extension') # depends on [control=['if'], data=[]] if bl_cont_area_le != utils.swab_32bit(bl_cont_area_be): raise pycdlibexception.PyCdlibInvalidISO('CE record big and little endian continuation area do not agree') # depends on [control=['if'], data=[]] if offset_cont_area_le != utils.swab_32bit(offset_cont_area_be): raise pycdlibexception.PyCdlibInvalidISO('CE record big and little endian continuation area offset do not agree') # depends on [control=['if'], data=[]] if len_cont_area_le != utils.swab_32bit(len_cont_area_be): raise pycdlibexception.PyCdlibInvalidISO('CE record big and little endian continuation area length do not agree') # depends on [control=['if'], data=[]] self.bl_cont_area = bl_cont_area_le self.offset_cont_area = offset_cont_area_le self.len_cont_area = len_cont_area_le self._initialized = True
def get_terms_in_subset(ont, subset): """ Find all nodes in a subset. We assume the oboInOwl encoding of subsets, and subset IDs are IRIs """ namedGraph = get_named_graph(ont) # note subsets have an unusual encoding query = """ prefix oboInOwl: <http://www.geneontology.org/formats/oboInOwl#> SELECT ?c ? WHERE {{ GRAPH <{g}> {{ ?c oboInOwl:inSubset ?s ; rdfs:label ?l FILTER regex(?s,'#{s}$','i') }} }} """.format(s=subset, g=namedGraph) bindings = run_sparql(query) return [(r['c']['value'],r['l']['value']) for r in bindings]
def function[get_terms_in_subset, parameter[ont, subset]]: constant[ Find all nodes in a subset. We assume the oboInOwl encoding of subsets, and subset IDs are IRIs ] variable[namedGraph] assign[=] call[name[get_named_graph], parameter[name[ont]]] variable[query] assign[=] call[constant[ prefix oboInOwl: <http://www.geneontology.org/formats/oboInOwl#> SELECT ?c ? WHERE {{ GRAPH <{g}> {{ ?c oboInOwl:inSubset ?s ; rdfs:label ?l FILTER regex(?s,'#{s}$','i') }} }} ].format, parameter[]] variable[bindings] assign[=] call[name[run_sparql], parameter[name[query]]] return[<ast.ListComp object at 0x7da20e9562c0>]
keyword[def] identifier[get_terms_in_subset] ( identifier[ont] , identifier[subset] ): literal[string] identifier[namedGraph] = identifier[get_named_graph] ( identifier[ont] ) identifier[query] = literal[string] . identifier[format] ( identifier[s] = identifier[subset] , identifier[g] = identifier[namedGraph] ) identifier[bindings] = identifier[run_sparql] ( identifier[query] ) keyword[return] [( identifier[r] [ literal[string] ][ literal[string] ], identifier[r] [ literal[string] ][ literal[string] ]) keyword[for] identifier[r] keyword[in] identifier[bindings] ]
def get_terms_in_subset(ont, subset): """ Find all nodes in a subset. We assume the oboInOwl encoding of subsets, and subset IDs are IRIs """ namedGraph = get_named_graph(ont) # note subsets have an unusual encoding query = "\n prefix oboInOwl: <http://www.geneontology.org/formats/oboInOwl#>\n SELECT ?c ? WHERE {{\n GRAPH <{g}> {{\n ?c oboInOwl:inSubset ?s ;\n rdfs:label ?l\n FILTER regex(?s,'#{s}$','i')\n }}\n }}\n ".format(s=subset, g=namedGraph) bindings = run_sparql(query) return [(r['c']['value'], r['l']['value']) for r in bindings]
async def delete(self): """ Delete task (in any state) permanently. Returns `True` is task is deleted. """ the_tuple = await self.queue.delete(self.tube, self.task_id) self.update_from_tuple(the_tuple) return bool(self.state == DONE)
<ast.AsyncFunctionDef object at 0x7da2044c3a30>
keyword[async] keyword[def] identifier[delete] ( identifier[self] ): literal[string] identifier[the_tuple] = keyword[await] identifier[self] . identifier[queue] . identifier[delete] ( identifier[self] . identifier[tube] , identifier[self] . identifier[task_id] ) identifier[self] . identifier[update_from_tuple] ( identifier[the_tuple] ) keyword[return] identifier[bool] ( identifier[self] . identifier[state] == identifier[DONE] )
async def delete(self): """ Delete task (in any state) permanently. Returns `True` is task is deleted. """ the_tuple = await self.queue.delete(self.tube, self.task_id) self.update_from_tuple(the_tuple) return bool(self.state == DONE)
def search(self, query_string, **kwargs): """ The main search method :param query_string: The string to pass to Elasticsearch. e.g. '*:*' :param kwargs: start_offset, end_offset, result_class :return: result_class instance """ self.index_name = self._index_name_for_language(translation.get_language()) # self.log.debug('search method called (%s): %s' % # (translation.get_language(), query_string)) return super(ElasticsearchMultilingualSearchBackend, self).search(query_string, **kwargs)
def function[search, parameter[self, query_string]]: constant[ The main search method :param query_string: The string to pass to Elasticsearch. e.g. '*:*' :param kwargs: start_offset, end_offset, result_class :return: result_class instance ] name[self].index_name assign[=] call[name[self]._index_name_for_language, parameter[call[name[translation].get_language, parameter[]]]] return[call[call[name[super], parameter[name[ElasticsearchMultilingualSearchBackend], name[self]]].search, parameter[name[query_string]]]]
keyword[def] identifier[search] ( identifier[self] , identifier[query_string] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[index_name] = identifier[self] . identifier[_index_name_for_language] ( identifier[translation] . identifier[get_language] ()) keyword[return] identifier[super] ( identifier[ElasticsearchMultilingualSearchBackend] , identifier[self] ). identifier[search] ( identifier[query_string] ,** identifier[kwargs] )
def search(self, query_string, **kwargs): """ The main search method :param query_string: The string to pass to Elasticsearch. e.g. '*:*' :param kwargs: start_offset, end_offset, result_class :return: result_class instance """ self.index_name = self._index_name_for_language(translation.get_language()) # self.log.debug('search method called (%s): %s' % # (translation.get_language(), query_string)) return super(ElasticsearchMultilingualSearchBackend, self).search(query_string, **kwargs)
def header_echo(cls, request, api_key: (Ptypes.header, String('API key'))) -> [ (200, 'Ok', String)]: '''Echo the header parameter.''' log.info('Echoing header param, value is: {}'.format(api_key)) for i in range(randint(0, MAX_LOOP_DURATION)): yield msg = 'The value sent was: {}'.format(api_key) Respond(200, msg)
def function[header_echo, parameter[cls, request, api_key]]: constant[Echo the header parameter.] call[name[log].info, parameter[call[constant[Echoing header param, value is: {}].format, parameter[name[api_key]]]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[randint], parameter[constant[0], name[MAX_LOOP_DURATION]]]]]] begin[:] <ast.Yield object at 0x7da2045643a0> variable[msg] assign[=] call[constant[The value sent was: {}].format, parameter[name[api_key]]] call[name[Respond], parameter[constant[200], name[msg]]]
keyword[def] identifier[header_echo] ( identifier[cls] , identifier[request] , identifier[api_key] :( identifier[Ptypes] . identifier[header] , identifier[String] ( literal[string] )))->[ ( literal[int] , literal[string] , identifier[String] )]: literal[string] identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[api_key] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[randint] ( literal[int] , identifier[MAX_LOOP_DURATION] )): keyword[yield] identifier[msg] = literal[string] . identifier[format] ( identifier[api_key] ) identifier[Respond] ( literal[int] , identifier[msg] )
def header_echo(cls, request, api_key: (Ptypes.header, String('API key'))) -> [(200, 'Ok', String)]: """Echo the header parameter.""" log.info('Echoing header param, value is: {}'.format(api_key)) for i in range(randint(0, MAX_LOOP_DURATION)): yield # depends on [control=['for'], data=[]] msg = 'The value sent was: {}'.format(api_key) Respond(200, msg)
def get_all_alert(self, **kwargs): # noqa: E501 """Get all alerts for a customer # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_all_alert(async_req=True) >>> result = thread.get() :param async_req bool :param int offset: :param int limit: :return: ResponseContainerPagedAlert If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_all_alert_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_all_alert_with_http_info(**kwargs) # noqa: E501 return data
def function[get_all_alert, parameter[self]]: constant[Get all alerts for a customer # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_all_alert(async_req=True) >>> result = thread.get() :param async_req bool :param int offset: :param int limit: :return: ResponseContainerPagedAlert If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].get_all_alert_with_http_info, parameter[]]]
keyword[def] identifier[get_all_alert] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[get_all_alert_with_http_info] (** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[get_all_alert_with_http_info] (** identifier[kwargs] ) keyword[return] identifier[data]
def get_all_alert(self, **kwargs): # noqa: E501 'Get all alerts for a customer # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_all_alert(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset:\n :param int limit:\n :return: ResponseContainerPagedAlert\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_all_alert_with_http_info(**kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.get_all_alert_with_http_info(**kwargs) # noqa: E501 return data
def urltool(classqname, filt, reverse):
    """
    Dump all urls branching from a class as OpenAPI 3 documentation

    The class must be given as a FQPN which points to a Klein() instance.

    Apply optional [FILT] as a regular expression searching within urls.
    For example, to match all urls beginning with api, you might use '^/api'
    """
    # An empty/None filter matches every rule path.
    pattern = re.compile(filt or '.*')
    root_class = namedAny(classqname)

    collected = []
    for rule in sorted(list(_iterClass(root_class))):
        # Sub-Klein containers are traversal artifacts, not endpoints.
        if rule.subKlein:
            continue
        hit = pattern.search(rule.rulePath) is not None
        if reverse:
            hit = not hit
        if hit:
            collected.append(tuple(rule.toOpenAPIPath()))

    spec = openapi.OpenAPI()
    for path_key, path_item in collected:
        # Merge operations that share a path into one path item.
        if path_key in spec.paths:
            spec.paths[path_key].merge(path_item)
        else:
            spec.paths[path_key] = path_item

    print(yaml.dump(spec, default_flow_style=False))
def function[urltool, parameter[classqname, filt, reverse]]: constant[ Dump all urls branching from a class as OpenAPI 3 documentation The class must be given as a FQPN which points to a Klein() instance. Apply optional [FILT] as a regular expression searching within urls. For example, to match all urls beginning with api, you might use '^/api' ] variable[filt] assign[=] call[name[re].compile, parameter[<ast.BoolOp object at 0x7da204567a90>]] variable[rootCls] assign[=] call[name[namedAny], parameter[name[classqname]]] variable[rules] assign[=] call[name[list], parameter[call[name[_iterClass], parameter[name[rootCls]]]]] variable[arr] assign[=] list[[]] for taget[name[item]] in starred[call[name[sorted], parameter[name[rules]]]] begin[:] if name[item].subKlein begin[:] continue variable[matched] assign[=] call[name[filt].search, parameter[name[item].rulePath]] variable[matched] assign[=] <ast.IfExp object at 0x7da2041da830> if name[matched] begin[:] call[name[arr].append, parameter[call[name[tuple], parameter[call[name[item].toOpenAPIPath, parameter[]]]]]] variable[openapi3] assign[=] call[name[openapi].OpenAPI, parameter[]] for taget[tuple[[<ast.Name object at 0x7da2041db6d0>, <ast.Name object at 0x7da2041d8490>]]] in starred[name[arr]] begin[:] if compare[name[pathPath] in name[openapi3].paths] begin[:] call[call[name[openapi3].paths][name[pathPath]].merge, parameter[name[pathItem]]] call[name[print], parameter[call[name[yaml].dump, parameter[name[openapi3]]]]]
keyword[def] identifier[urltool] ( identifier[classqname] , identifier[filt] , identifier[reverse] ): literal[string] identifier[filt] = identifier[re] . identifier[compile] ( identifier[filt] keyword[or] literal[string] ) identifier[rootCls] = identifier[namedAny] ( identifier[classqname] ) identifier[rules] = identifier[list] ( identifier[_iterClass] ( identifier[rootCls] )) identifier[arr] =[] keyword[for] identifier[item] keyword[in] identifier[sorted] ( identifier[rules] ): keyword[if] identifier[item] . identifier[subKlein] : keyword[continue] identifier[matched] = identifier[filt] . identifier[search] ( identifier[item] . identifier[rulePath] ) identifier[matched] = keyword[not] identifier[matched] keyword[if] identifier[reverse] keyword[else] identifier[matched] keyword[if] identifier[matched] : identifier[arr] . identifier[append] ( identifier[tuple] ( identifier[item] . identifier[toOpenAPIPath] ())) identifier[openapi3] = identifier[openapi] . identifier[OpenAPI] () keyword[for] identifier[pathPath] , identifier[pathItem] keyword[in] identifier[arr] : keyword[if] identifier[pathPath] keyword[in] identifier[openapi3] . identifier[paths] : identifier[openapi3] . identifier[paths] [ identifier[pathPath] ]. identifier[merge] ( identifier[pathItem] ) keyword[else] : identifier[openapi3] . identifier[paths] [ identifier[pathPath] ]= identifier[pathItem] identifier[print] ( identifier[yaml] . identifier[dump] ( identifier[openapi3] , identifier[default_flow_style] = keyword[False] ))
def urltool(classqname, filt, reverse): """ Dump all urls branching from a class as OpenAPI 3 documentation The class must be given as a FQPN which points to a Klein() instance. Apply optional [FILT] as a regular expression searching within urls. For example, to match all urls beginning with api, you might use '^/api' """ filt = re.compile(filt or '.*') rootCls = namedAny(classqname) rules = list(_iterClass(rootCls)) arr = [] for item in sorted(rules): if item.subKlein: continue # depends on [control=['if'], data=[]] matched = filt.search(item.rulePath) matched = not matched if reverse else matched if matched: arr.append(tuple(item.toOpenAPIPath())) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] openapi3 = openapi.OpenAPI() for (pathPath, pathItem) in arr: if pathPath in openapi3.paths: openapi3.paths[pathPath].merge(pathItem) # depends on [control=['if'], data=['pathPath']] else: openapi3.paths[pathPath] = pathItem # depends on [control=['for'], data=[]] print(yaml.dump(openapi3, default_flow_style=False))
def determine_coords(list_of_variable_dicts):
    # type: (List[Dict]) -> Tuple[Set, Set]
    """Given a list of dicts with xarray object values, identify coordinates.

    Parameters
    ----------
    list_of_variable_dicts : list of dict or Dataset objects
        Of the same form as the arguments to expand_variable_dicts.

    Returns
    -------
    coord_names : set of variable names
    noncoord_names : set of variable names
        All variable found in the input should appear in either the set of
        coordinate or non-coordinate names.
    """
    from .dataarray import DataArray
    from .dataset import Dataset

    coord_names = set()  # type: set
    noncoord_names = set()  # type: set

    for mapping in list_of_variable_dicts:
        if isinstance(mapping, Dataset):
            coord_names.update(mapping.coords)
            noncoord_names.update(mapping.data_vars)
            continue
        for var_name, var in mapping.items():
            if not isinstance(var, DataArray):
                continue
            found = set(var._coords)  # use private API for speed
            # explicitly overwritten variables should take precedence
            found.discard(var_name)
            coord_names.update(found)

    return coord_names, noncoord_names
def function[determine_coords, parameter[list_of_variable_dicts]]: constant[Given a list of dicts with xarray object values, identify coordinates. Parameters ---------- list_of_variable_dicts : list of dict or Dataset objects Of the same form as the arguments to expand_variable_dicts. Returns ------- coord_names : set of variable names noncoord_names : set of variable names All variable found in the input should appear in either the set of coordinate or non-coordinate names. ] from relative_module[dataarray] import module[DataArray] from relative_module[dataset] import module[Dataset] variable[coord_names] assign[=] call[name[set], parameter[]] variable[noncoord_names] assign[=] call[name[set], parameter[]] for taget[name[variables]] in starred[name[list_of_variable_dicts]] begin[:] if call[name[isinstance], parameter[name[variables], name[Dataset]]] begin[:] call[name[coord_names].update, parameter[name[variables].coords]] call[name[noncoord_names].update, parameter[name[variables].data_vars]] return[tuple[[<ast.Name object at 0x7da1b1f95a80>, <ast.Name object at 0x7da1b1f95300>]]]
keyword[def] identifier[determine_coords] ( identifier[list_of_variable_dicts] ): literal[string] keyword[from] . identifier[dataarray] keyword[import] identifier[DataArray] keyword[from] . identifier[dataset] keyword[import] identifier[Dataset] identifier[coord_names] = identifier[set] () identifier[noncoord_names] = identifier[set] () keyword[for] identifier[variables] keyword[in] identifier[list_of_variable_dicts] : keyword[if] identifier[isinstance] ( identifier[variables] , identifier[Dataset] ): identifier[coord_names] . identifier[update] ( identifier[variables] . identifier[coords] ) identifier[noncoord_names] . identifier[update] ( identifier[variables] . identifier[data_vars] ) keyword[else] : keyword[for] identifier[name] , identifier[var] keyword[in] identifier[variables] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[var] , identifier[DataArray] ): identifier[coords] = identifier[set] ( identifier[var] . identifier[_coords] ) identifier[coords] . identifier[discard] ( identifier[name] ) identifier[coord_names] . identifier[update] ( identifier[coords] ) keyword[return] identifier[coord_names] , identifier[noncoord_names]
def determine_coords(list_of_variable_dicts): # type: (List[Dict]) -> Tuple[Set, Set] 'Given a list of dicts with xarray object values, identify coordinates.\n\n Parameters\n ----------\n list_of_variable_dicts : list of dict or Dataset objects\n Of the same form as the arguments to expand_variable_dicts.\n\n Returns\n -------\n coord_names : set of variable names\n noncoord_names : set of variable names\n All variable found in the input should appear in either the set of\n coordinate or non-coordinate names.\n ' from .dataarray import DataArray from .dataset import Dataset coord_names = set() # type: set noncoord_names = set() # type: set for variables in list_of_variable_dicts: if isinstance(variables, Dataset): coord_names.update(variables.coords) noncoord_names.update(variables.data_vars) # depends on [control=['if'], data=[]] else: for (name, var) in variables.items(): if isinstance(var, DataArray): coords = set(var._coords) # use private API for speed # explicitly overwritten variables should take precedence coords.discard(name) coord_names.update(coords) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['variables']] return (coord_names, noncoord_names)
def append_column(self, header, column):
    """Append a column to end of the table.

    Parameters
    ----------
    header : str
        Title of the column

    column : iterable
        Any iterable of appropriate length.
    """
    # Appending is simply inserting at the current column count.
    position = self._column_count
    self.insert_column(position, header, column)
def function[append_column, parameter[self, header, column]]: constant[Append a column to end of the table. Parameters ---------- header : str Title of the column column : iterable Any iterable of appropriate length. ] call[name[self].insert_column, parameter[name[self]._column_count, name[header], name[column]]]
keyword[def] identifier[append_column] ( identifier[self] , identifier[header] , identifier[column] ): literal[string] identifier[self] . identifier[insert_column] ( identifier[self] . identifier[_column_count] , identifier[header] , identifier[column] )
def append_column(self, header, column): """Append a column to end of the table. Parameters ---------- header : str Title of the column column : iterable Any iterable of appropriate length. """ self.insert_column(self._column_count, header, column)
def __system_multiCall(calls, **kwargs):
    """
    Call multiple RPC methods at once.

    :param calls: An array of struct like {"methodName": string, "params": array }
    :param kwargs: Internal data
    :type calls: list
    :type kwargs: dict
    :return:
    """
    if not isinstance(calls, list):
        raise RPCInvalidParams('system.multicall first argument should be a list, {} given.'.format(type(calls)))

    handler = kwargs.get(HANDLER_KEY)

    results = []
    for call_struct in calls:
        try:
            value = handler.execute_procedure(call_struct['methodName'], args=call_struct.get('params'))
        except RPCException as exc:
            # Known RPC failures keep their own fault code and message.
            results.append({
                'faultCode': exc.code,
                'faultString': exc.message,
            })
        except Exception as exc:
            # Anything else is reported as an internal error fault.
            results.append({
                'faultCode': RPC_INTERNAL_ERROR,
                'faultString': str(exc),
            })
        else:
            # From https://mirrors.talideon.com/articles/multicall.html:
            # "Notice that regular return values are always nested inside a
            # one-element array. This allows you to return structs from
            # functions without confusing them with faults."
            results.append([value])

    return results
def function[__system_multiCall, parameter[calls]]: constant[ Call multiple RPC methods at once. :param calls: An array of struct like {"methodName": string, "params": array } :param kwargs: Internal data :type calls: list :type kwargs: dict :return: ] if <ast.UnaryOp object at 0x7da1b04ec640> begin[:] <ast.Raise object at 0x7da1b04ec070> variable[handler] assign[=] call[name[kwargs].get, parameter[name[HANDLER_KEY]]] variable[results] assign[=] list[[]] for taget[name[call]] in starred[name[calls]] begin[:] <ast.Try object at 0x7da1b04efa60> return[name[results]]
keyword[def] identifier[__system_multiCall] ( identifier[calls] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[calls] , identifier[list] ): keyword[raise] identifier[RPCInvalidParams] ( literal[string] . identifier[format] ( identifier[type] ( identifier[calls] ))) identifier[handler] = identifier[kwargs] . identifier[get] ( identifier[HANDLER_KEY] ) identifier[results] =[] keyword[for] identifier[call] keyword[in] identifier[calls] : keyword[try] : identifier[result] = identifier[handler] . identifier[execute_procedure] ( identifier[call] [ literal[string] ], identifier[args] = identifier[call] . identifier[get] ( literal[string] )) identifier[results] . identifier[append] ([ identifier[result] ]) keyword[except] identifier[RPCException] keyword[as] identifier[e] : identifier[results] . identifier[append] ({ literal[string] : identifier[e] . identifier[code] , literal[string] : identifier[e] . identifier[message] , }) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[results] . identifier[append] ({ literal[string] : identifier[RPC_INTERNAL_ERROR] , literal[string] : identifier[str] ( identifier[e] ), }) keyword[return] identifier[results]
def __system_multiCall(calls, **kwargs): """ Call multiple RPC methods at once. :param calls: An array of struct like {"methodName": string, "params": array } :param kwargs: Internal data :type calls: list :type kwargs: dict :return: """ if not isinstance(calls, list): raise RPCInvalidParams('system.multicall first argument should be a list, {} given.'.format(type(calls))) # depends on [control=['if'], data=[]] handler = kwargs.get(HANDLER_KEY) results = [] for call in calls: try: result = handler.execute_procedure(call['methodName'], args=call.get('params')) # From https://mirrors.talideon.com/articles/multicall.html: # "Notice that regular return values are always nested inside a one-element array. This allows you to # return structs from functions without confusing them with faults." results.append([result]) # depends on [control=['try'], data=[]] except RPCException as e: results.append({'faultCode': e.code, 'faultString': e.message}) # depends on [control=['except'], data=['e']] except Exception as e: results.append({'faultCode': RPC_INTERNAL_ERROR, 'faultString': str(e)}) # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['call']] return results
def _set_peer(self, v, load=False):
  """
  Setter method for peer, mapped from YANG variable /ntp/peer (list)
  If this variable is read-only (config: false) in the
  source YANG file, then _set_peer is considered as a private
  method. Backends looking to populate this variable should
  do so via calling thisObj._set_peer() directly.
  """
  # Unwrap values that carry their own YANG type converter.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Coerce the incoming value into the generated YANG list type; this
    # validates it against the model and wires in path helpers/extensions.
    t = YANGDynClass(v,base=YANGListType("peer_ip",peer.peer, yang_name="peer", rest_name="peer", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='peer-ip', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP peer', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'36', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-peer'}}), is_container='list', yang_name="peer", rest_name="peer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP peer', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'36', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-peer'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True)
  except (TypeError, ValueError):
    # Re-raise as a structured ValueError describing the expected YANG type.
    raise ValueError({
      'error-string': """peer must be of a type compatible with list""",
      'defined-type': "list",
      'generated-type': """YANGDynClass(base=YANGListType("peer_ip",peer.peer, yang_name="peer", rest_name="peer", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='peer-ip', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP peer', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'36', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-peer'}}), is_container='list', yang_name="peer", rest_name="peer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP peer', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'36', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-peer'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True)""",
    })

  self.__peer = t
  # Notify the backend, if one is attached, that configuration changed.
  if hasattr(self, '_set'):
    self._set()
def function[_set_peer, parameter[self, v, load]]: constant[ Setter method for peer, mapped from YANG variable /ntp/peer (list) If this variable is read-only (config: false) in the source YANG file, then _set_peer is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_peer() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da20c76ea40> name[self].__peer assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_peer] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[peer] . identifier[peer] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . 
identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__peer] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_peer(self, v, load=False): """ Setter method for peer, mapped from YANG variable /ntp/peer (list) If this variable is read-only (config: false) in the source YANG file, then _set_peer is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_peer() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=YANGListType('peer_ip', peer.peer, yang_name='peer', rest_name='peer', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='peer-ip', extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP peer', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'36', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-peer'}}), is_container='list', yang_name='peer', rest_name='peer', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-key-sort': None, u'info': u'Configure NTP peer', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'sort-priority': u'36', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'ntp-peer'}}, namespace='urn:brocade.com:mgmt:brocade-ntp', defining_module='brocade-ntp', yang_type='list', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'peer must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("peer_ip",peer.peer, yang_name="peer", rest_name="peer", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'peer-ip\', extensions={u\'tailf-common\': {u\'cli-suppress-key-sort\': None, 
u\'info\': u\'Configure NTP peer\', u\'cli-no-key-completion\': None, u\'cli-suppress-mode\': None, u\'sort-priority\': u\'36\', u\'cli-suppress-list-no\': None, u\'cli-full-no\': None, u\'cli-compact-syntax\': None, u\'cli-suppress-key-abbreviation\': None, u\'callpoint\': u\'ntp-peer\'}}), is_container=\'list\', yang_name="peer", rest_name="peer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'cli-suppress-key-sort\': None, u\'info\': u\'Configure NTP peer\', u\'cli-no-key-completion\': None, u\'cli-suppress-mode\': None, u\'sort-priority\': u\'36\', u\'cli-suppress-list-no\': None, u\'cli-full-no\': None, u\'cli-compact-syntax\': None, u\'cli-suppress-key-abbreviation\': None, u\'callpoint\': u\'ntp-peer\'}}, namespace=\'urn:brocade.com:mgmt:brocade-ntp\', defining_module=\'brocade-ntp\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__peer = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def StatEntryFromStat(stat, pathspec, ext_attrs=True):
  """Build a stat entry object from a given stat object.

  Args:
    stat: A `Stat` object.
    pathspec: A `PathSpec` from which `stat` was obtained.
    ext_attrs: Whether to include extended file attributes in the result.

  Returns:
    `StatEntry` object.
  """
  result = rdf_client_fs.StatEntry(pathspec=pathspec)

  raw = stat.GetRaw()
  for attr in _STAT_ATTRS:
    value = getattr(raw, attr, None)
    if value is None:
      continue

    # TODO(hanuszczak): Why are we doing this?
    value = int(value)
    if value < 0:
      # Fold negative values into the unsigned 32-bit range.
      value &= 0xFFFFFFFF

    setattr(result, attr, value)

  result.st_flags_linux = stat.GetLinuxFlags()
  result.st_flags_osx = stat.GetOsxFlags()

  if ext_attrs:
    # TODO(hanuszczak): Can we somehow incorporate extended attribute getter
    # to the `Stat` class? That would make the code a lot prettier but would
    # force `utils` to depend on `xattrs`.
    result.ext_attrs = list(GetExtAttrs(stat.GetPath()))

  return result
def function[StatEntryFromStat, parameter[stat, pathspec, ext_attrs]]: constant[Build a stat entry object from a given stat object. Args: stat: A `Stat` object. pathspec: A `PathSpec` from which `stat` was obtained. ext_attrs: Whether to include extended file attributes in the result. Returns: `StatEntry` object. ] variable[result] assign[=] call[name[rdf_client_fs].StatEntry, parameter[]] for taget[name[attr]] in starred[name[_STAT_ATTRS]] begin[:] variable[value] assign[=] call[name[getattr], parameter[call[name[stat].GetRaw, parameter[]], name[attr], constant[None]]] if compare[name[value] is constant[None]] begin[:] continue variable[value] assign[=] call[name[int], parameter[name[value]]] if compare[name[value] less[<] constant[0]] begin[:] <ast.AugAssign object at 0x7da1b1c0dff0> call[name[setattr], parameter[name[result], name[attr], name[value]]] name[result].st_flags_linux assign[=] call[name[stat].GetLinuxFlags, parameter[]] name[result].st_flags_osx assign[=] call[name[stat].GetOsxFlags, parameter[]] if name[ext_attrs] begin[:] name[result].ext_attrs assign[=] call[name[list], parameter[call[name[GetExtAttrs], parameter[call[name[stat].GetPath, parameter[]]]]]] return[name[result]]
keyword[def] identifier[StatEntryFromStat] ( identifier[stat] , identifier[pathspec] , identifier[ext_attrs] = keyword[True] ): literal[string] identifier[result] = identifier[rdf_client_fs] . identifier[StatEntry] ( identifier[pathspec] = identifier[pathspec] ) keyword[for] identifier[attr] keyword[in] identifier[_STAT_ATTRS] : identifier[value] = identifier[getattr] ( identifier[stat] . identifier[GetRaw] (), identifier[attr] , keyword[None] ) keyword[if] identifier[value] keyword[is] keyword[None] : keyword[continue] identifier[value] = identifier[int] ( identifier[value] ) keyword[if] identifier[value] < literal[int] : identifier[value] &= literal[int] identifier[setattr] ( identifier[result] , identifier[attr] , identifier[value] ) identifier[result] . identifier[st_flags_linux] = identifier[stat] . identifier[GetLinuxFlags] () identifier[result] . identifier[st_flags_osx] = identifier[stat] . identifier[GetOsxFlags] () keyword[if] identifier[ext_attrs] : identifier[result] . identifier[ext_attrs] = identifier[list] ( identifier[GetExtAttrs] ( identifier[stat] . identifier[GetPath] ())) keyword[return] identifier[result]
def StatEntryFromStat(stat, pathspec, ext_attrs=True): """Build a stat entry object from a given stat object. Args: stat: A `Stat` object. pathspec: A `PathSpec` from which `stat` was obtained. ext_attrs: Whether to include extended file attributes in the result. Returns: `StatEntry` object. """ result = rdf_client_fs.StatEntry(pathspec=pathspec) for attr in _STAT_ATTRS: value = getattr(stat.GetRaw(), attr, None) if value is None: continue # depends on [control=['if'], data=[]] # TODO(hanuszczak): Why are we doing this? value = int(value) if value < 0: value &= 4294967295 # depends on [control=['if'], data=['value']] setattr(result, attr, value) # depends on [control=['for'], data=['attr']] result.st_flags_linux = stat.GetLinuxFlags() result.st_flags_osx = stat.GetOsxFlags() if ext_attrs: # TODO(hanuszczak): Can we somehow incorporate extended attribute getter to # the `Stat` class? That would make the code a lot prettier but would force # `utils` to depend on `xattrs`. result.ext_attrs = list(GetExtAttrs(stat.GetPath())) # depends on [control=['if'], data=[]] return result
def carmichael() -> Iterator[int]:
    """Composite numbers n such that a^(n-1) == 1 (mod n) for every a
    coprime to n.

    https://oeis.org/A002997
    """
    for n in composite():
        # Korselt-style test: n is a Carmichael number iff a^n == a (mod n)
        # for every base a in [2, n).
        if all(pow(a, n, n) == a for a in range(2, n)):
            yield n
def function[carmichael, parameter[]]: constant[Composite numbers n such that a^(n-1) == 1 (mod n) for every a coprime to n. https://oeis.org/A002997 ] for taget[name[m]] in starred[call[name[composite], parameter[]]] begin[:] for taget[name[a]] in starred[call[name[range], parameter[constant[2], name[m]]]] begin[:] if compare[call[name[pow], parameter[name[a], name[m], name[m]]] not_equal[!=] name[a]] begin[:] break
keyword[def] identifier[carmichael] ()-> identifier[Iterator] [ identifier[int] ]: literal[string] keyword[for] identifier[m] keyword[in] identifier[composite] (): keyword[for] identifier[a] keyword[in] identifier[range] ( literal[int] , identifier[m] ): keyword[if] identifier[pow] ( identifier[a] , identifier[m] , identifier[m] )!= identifier[a] : keyword[break] keyword[else] : keyword[yield] identifier[m]
def carmichael() -> Iterator[int]: """Composite numbers n such that a^(n-1) == 1 (mod n) for every a coprime to n. https://oeis.org/A002997 """ for m in composite(): for a in range(2, m): if pow(a, m, m) != a: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] else: yield m # depends on [control=['for'], data=['m']]
def timTuVi(cuc, ngaySinhAmLich):
    """Find the position of the Tu Vi star.

    Args:
        cuc: the "cuc" number; must be one of 2, 3, 4, 5, 6
        ngaySinhAmLich: lunar-calendar day of birth

    Returns:
        The palace index produced by dichCung().

    Raises:
        Exception: if cuc is not one of 2, 3, 4, 5, 6.
    """
    cungDan = 3  # initial position of the Dan palace
    cucBanDau = cuc
    # Guard against an infinite loop below (cuc must be positive and valid).
    if cuc not in [2, 3, 4, 5, 6]:
        raise Exception("Số cục phải là 2, 3, 4, 5, 6")
    while cuc < ngaySinhAmLich:
        cuc += cucBanDau
        cungDan += 1  # shift the Dan palace position
    saiLech = cuc - ngaySinhAmLich
    # Bug fix: compare with ==, not "is". Identity comparison on ints relies
    # on CPython's small-integer caching and raises a SyntaxWarning on 3.8+.
    if saiLech % 2 == 1:
        # If the offset is even, move forward; if odd, move backward.
        saiLech = -saiLech
    return dichCung(cungDan, saiLech)
def function[timTuVi, parameter[cuc, ngaySinhAmLich]]: constant[Tìm vị trí của sao Tử vi Args: cuc (TYPE): Description ngaySinhAmLich (TYPE): Description Returns: TYPE: Description Raises: Exception: Description ] variable[cungDan] assign[=] constant[3] variable[cucBanDau] assign[=] name[cuc] if compare[name[cuc] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da20c6c7370>, <ast.Constant object at 0x7da204963460>, <ast.Constant object at 0x7da2049615a0>, <ast.Constant object at 0x7da204960d30>, <ast.Constant object at 0x7da2049635b0>]]] begin[:] <ast.Raise object at 0x7da204960b20> while compare[name[cuc] less[<] name[ngaySinhAmLich]] begin[:] <ast.AugAssign object at 0x7da204963d30> <ast.AugAssign object at 0x7da204961a50> variable[saiLech] assign[=] binary_operation[name[cuc] - name[ngaySinhAmLich]] if compare[binary_operation[name[saiLech] <ast.Mod object at 0x7da2590d6920> constant[2]] is constant[1]] begin[:] variable[saiLech] assign[=] <ast.UnaryOp object at 0x7da204962a70> return[call[name[dichCung], parameter[name[cungDan], name[saiLech]]]]
keyword[def] identifier[timTuVi] ( identifier[cuc] , identifier[ngaySinhAmLich] ): literal[string] identifier[cungDan] = literal[int] identifier[cucBanDau] = identifier[cuc] keyword[if] identifier[cuc] keyword[not] keyword[in] [ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]: keyword[raise] identifier[Exception] ( literal[string] ) keyword[while] identifier[cuc] < identifier[ngaySinhAmLich] : identifier[cuc] += identifier[cucBanDau] identifier[cungDan] += literal[int] identifier[saiLech] = identifier[cuc] - identifier[ngaySinhAmLich] keyword[if] identifier[saiLech] % literal[int] keyword[is] literal[int] : identifier[saiLech] =- identifier[saiLech] keyword[return] identifier[dichCung] ( identifier[cungDan] , identifier[saiLech] )
def timTuVi(cuc, ngaySinhAmLich): """Tìm vị trí của sao Tử vi Args: cuc (TYPE): Description ngaySinhAmLich (TYPE): Description Returns: TYPE: Description Raises: Exception: Description """ cungDan = 3 # Vị trí cung Dần ban đầu là 3 cucBanDau = cuc if cuc not in [2, 3, 4, 5, 6]: # Tránh trường hợp infinite loop raise Exception('Số cục phải là 2, 3, 4, 5, 6') # depends on [control=['if'], data=[]] while cuc < ngaySinhAmLich: cuc += cucBanDau cungDan += 1 # Dịch vị trí cung Dần # depends on [control=['while'], data=['cuc']] saiLech = cuc - ngaySinhAmLich if saiLech % 2 is 1: saiLech = -saiLech # Nếu sai lệch là chẵn thì tiến, lẻ thì lùi # depends on [control=['if'], data=[]] return dichCung(cungDan, saiLech)
def get_attribute(self, locator, attribute, params=None, timeout=None, visible=False): """ Get attribute from element based on locator with optional parameters. Calls get_element() with expected condition: visibility of element located :param locator: locator tuple or WebElement instance :param attribute: attribute to return :param params: (optional) locator parameters :param timeout: (optional) time to wait for text (default: None) :param visible: should element be visible before getting text (default: False) :return: element attribute """ element = locator if not isinstance(element, WebElement): element = self.get_present_element(locator, params, timeout, visible) try: return element.get_attribute(attribute) except AttributeError: msg = "Element with attribute <{}> was never located!".format(attribute) raise NoSuchElementException(msg)
def function[get_attribute, parameter[self, locator, attribute, params, timeout, visible]]: constant[ Get attribute from element based on locator with optional parameters. Calls get_element() with expected condition: visibility of element located :param locator: locator tuple or WebElement instance :param attribute: attribute to return :param params: (optional) locator parameters :param timeout: (optional) time to wait for text (default: None) :param visible: should element be visible before getting text (default: False) :return: element attribute ] variable[element] assign[=] name[locator] if <ast.UnaryOp object at 0x7da1b26a4580> begin[:] variable[element] assign[=] call[name[self].get_present_element, parameter[name[locator], name[params], name[timeout], name[visible]]] <ast.Try object at 0x7da1b26a52a0>
keyword[def] identifier[get_attribute] ( identifier[self] , identifier[locator] , identifier[attribute] , identifier[params] = keyword[None] , identifier[timeout] = keyword[None] , identifier[visible] = keyword[False] ): literal[string] identifier[element] = identifier[locator] keyword[if] keyword[not] identifier[isinstance] ( identifier[element] , identifier[WebElement] ): identifier[element] = identifier[self] . identifier[get_present_element] ( identifier[locator] , identifier[params] , identifier[timeout] , identifier[visible] ) keyword[try] : keyword[return] identifier[element] . identifier[get_attribute] ( identifier[attribute] ) keyword[except] identifier[AttributeError] : identifier[msg] = literal[string] . identifier[format] ( identifier[attribute] ) keyword[raise] identifier[NoSuchElementException] ( identifier[msg] )
def get_attribute(self, locator, attribute, params=None, timeout=None, visible=False): """ Get attribute from element based on locator with optional parameters. Calls get_element() with expected condition: visibility of element located :param locator: locator tuple or WebElement instance :param attribute: attribute to return :param params: (optional) locator parameters :param timeout: (optional) time to wait for text (default: None) :param visible: should element be visible before getting text (default: False) :return: element attribute """ element = locator if not isinstance(element, WebElement): element = self.get_present_element(locator, params, timeout, visible) # depends on [control=['if'], data=[]] try: return element.get_attribute(attribute) # depends on [control=['try'], data=[]] except AttributeError: msg = 'Element with attribute <{}> was never located!'.format(attribute) raise NoSuchElementException(msg) # depends on [control=['except'], data=[]]
def _cutadapt_trim_cmd(fastq_files, quality_format, adapters, out_files, data): """Trimming with cutadapt, using version installed with bcbio-nextgen. """ if all([utils.file_exists(x) for x in out_files]): return out_files if quality_format == "illumina": quality_base = "64" else: quality_base = "33" # --times=2 tries twice remove adapters which will allow things like: # realsequenceAAAAAAadapter to remove both the poly-A and the adapter # this behavior might not be what we want; we could also do two or # more passes of cutadapt cutadapt = os.path.join(os.path.dirname(sys.executable), "cutadapt") adapter_cmd = " ".join(map(lambda x: "-a " + x, adapters)) ropts = " ".join(str(x) for x in config_utils.get_resources("cutadapt", data["config"]).get("options", [])) base_cmd = ("{cutadapt} {ropts} --times=2 --quality-base={quality_base} " "--quality-cutoff=5 --format=fastq " "{adapter_cmd} ").format(**locals()) if len(fastq_files) == 2: # support for the single-command paired trimming introduced in # cutadapt 1.8 adapter_cmd = adapter_cmd.replace("-a ", "-A ") base_cmd += "{adapter_cmd} ".format(adapter_cmd=adapter_cmd) return _cutadapt_pe_cmd(fastq_files, out_files, quality_format, base_cmd, data) else: return _cutadapt_se_cmd(fastq_files, out_files, base_cmd, data)
def function[_cutadapt_trim_cmd, parameter[fastq_files, quality_format, adapters, out_files, data]]: constant[Trimming with cutadapt, using version installed with bcbio-nextgen. ] if call[name[all], parameter[<ast.ListComp object at 0x7da1b18dab30>]] begin[:] return[name[out_files]] if compare[name[quality_format] equal[==] constant[illumina]] begin[:] variable[quality_base] assign[=] constant[64] variable[cutadapt] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[sys].executable]], constant[cutadapt]]] variable[adapter_cmd] assign[=] call[constant[ ].join, parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b18dac50>, name[adapters]]]]] variable[ropts] assign[=] call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b18d8220>]] variable[base_cmd] assign[=] call[constant[{cutadapt} {ropts} --times=2 --quality-base={quality_base} --quality-cutoff=5 --format=fastq {adapter_cmd} ].format, parameter[]] if compare[call[name[len], parameter[name[fastq_files]]] equal[==] constant[2]] begin[:] variable[adapter_cmd] assign[=] call[name[adapter_cmd].replace, parameter[constant[-a ], constant[-A ]]] <ast.AugAssign object at 0x7da1b18bd1b0> return[call[name[_cutadapt_pe_cmd], parameter[name[fastq_files], name[out_files], name[quality_format], name[base_cmd], name[data]]]]
keyword[def] identifier[_cutadapt_trim_cmd] ( identifier[fastq_files] , identifier[quality_format] , identifier[adapters] , identifier[out_files] , identifier[data] ): literal[string] keyword[if] identifier[all] ([ identifier[utils] . identifier[file_exists] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[out_files] ]): keyword[return] identifier[out_files] keyword[if] identifier[quality_format] == literal[string] : identifier[quality_base] = literal[string] keyword[else] : identifier[quality_base] = literal[string] identifier[cutadapt] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[sys] . identifier[executable] ), literal[string] ) identifier[adapter_cmd] = literal[string] . identifier[join] ( identifier[map] ( keyword[lambda] identifier[x] : literal[string] + identifier[x] , identifier[adapters] )) identifier[ropts] = literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[config_utils] . identifier[get_resources] ( literal[string] , identifier[data] [ literal[string] ]). identifier[get] ( literal[string] ,[])) identifier[base_cmd] =( literal[string] literal[string] literal[string] ). identifier[format] (** identifier[locals] ()) keyword[if] identifier[len] ( identifier[fastq_files] )== literal[int] : identifier[adapter_cmd] = identifier[adapter_cmd] . identifier[replace] ( literal[string] , literal[string] ) identifier[base_cmd] += literal[string] . identifier[format] ( identifier[adapter_cmd] = identifier[adapter_cmd] ) keyword[return] identifier[_cutadapt_pe_cmd] ( identifier[fastq_files] , identifier[out_files] , identifier[quality_format] , identifier[base_cmd] , identifier[data] ) keyword[else] : keyword[return] identifier[_cutadapt_se_cmd] ( identifier[fastq_files] , identifier[out_files] , identifier[base_cmd] , identifier[data] )
def _cutadapt_trim_cmd(fastq_files, quality_format, adapters, out_files, data): """Trimming with cutadapt, using version installed with bcbio-nextgen. """ if all([utils.file_exists(x) for x in out_files]): return out_files # depends on [control=['if'], data=[]] if quality_format == 'illumina': quality_base = '64' # depends on [control=['if'], data=[]] else: quality_base = '33' # --times=2 tries twice remove adapters which will allow things like: # realsequenceAAAAAAadapter to remove both the poly-A and the adapter # this behavior might not be what we want; we could also do two or # more passes of cutadapt cutadapt = os.path.join(os.path.dirname(sys.executable), 'cutadapt') adapter_cmd = ' '.join(map(lambda x: '-a ' + x, adapters)) ropts = ' '.join((str(x) for x in config_utils.get_resources('cutadapt', data['config']).get('options', []))) base_cmd = '{cutadapt} {ropts} --times=2 --quality-base={quality_base} --quality-cutoff=5 --format=fastq {adapter_cmd} '.format(**locals()) if len(fastq_files) == 2: # support for the single-command paired trimming introduced in # cutadapt 1.8 adapter_cmd = adapter_cmd.replace('-a ', '-A ') base_cmd += '{adapter_cmd} '.format(adapter_cmd=adapter_cmd) return _cutadapt_pe_cmd(fastq_files, out_files, quality_format, base_cmd, data) # depends on [control=['if'], data=[]] else: return _cutadapt_se_cmd(fastq_files, out_files, base_cmd, data)
def list_all(fritz, args): """Command that prints all device information.""" devices = fritz.get_devices() for device in devices: print('#' * 30) print('name=%s' % device.name) print(' ain=%s' % device.ain) print(' id=%s' % device.identifier) print(' productname=%s' % device.productname) print(' manufacturer=%s' % device.manufacturer) print(" present=%s" % device.present) print(" lock=%s" % device.lock) print(" devicelock=%s" % device.device_lock) if device.present is False: continue if device.has_switch: print(" Switch:") print(" switch_state=%s" % device.switch_state) if device.has_switch: print(" Powermeter:") print(" power=%s" % device.power) print(" energy=%s" % device.energy) print(" voltage=%s" % device.voltage) if device.has_temperature_sensor: print(" Temperature:") print(" temperature=%s" % device.temperature) print(" offset=%s" % device.offset) if device.has_thermostat: print(" Thermostat:") print(" battery_low=%s" % device.battery_low) print(" battery_level=%s" % device.battery_level) print(" actual=%s" % device.actual_temperature) print(" target=%s" % device.target_temperature) print(" comfort=%s" % device.comfort_temperature) print(" eco=%s" % device.eco_temperature) print(" window=%s" % device.window_open) print(" summer=%s" % device.summer_active) print(" holiday=%s" % device.holiday_active) if device.has_alarm: print(" Alert:") print(" alert=%s" % device.alert_state)
def function[list_all, parameter[fritz, args]]: constant[Command that prints all device information.] variable[devices] assign[=] call[name[fritz].get_devices, parameter[]] for taget[name[device]] in starred[name[devices]] begin[:] call[name[print], parameter[binary_operation[constant[#] * constant[30]]]] call[name[print], parameter[binary_operation[constant[name=%s] <ast.Mod object at 0x7da2590d6920> name[device].name]]] call[name[print], parameter[binary_operation[constant[ ain=%s] <ast.Mod object at 0x7da2590d6920> name[device].ain]]] call[name[print], parameter[binary_operation[constant[ id=%s] <ast.Mod object at 0x7da2590d6920> name[device].identifier]]] call[name[print], parameter[binary_operation[constant[ productname=%s] <ast.Mod object at 0x7da2590d6920> name[device].productname]]] call[name[print], parameter[binary_operation[constant[ manufacturer=%s] <ast.Mod object at 0x7da2590d6920> name[device].manufacturer]]] call[name[print], parameter[binary_operation[constant[ present=%s] <ast.Mod object at 0x7da2590d6920> name[device].present]]] call[name[print], parameter[binary_operation[constant[ lock=%s] <ast.Mod object at 0x7da2590d6920> name[device].lock]]] call[name[print], parameter[binary_operation[constant[ devicelock=%s] <ast.Mod object at 0x7da2590d6920> name[device].device_lock]]] if compare[name[device].present is constant[False]] begin[:] continue if name[device].has_switch begin[:] call[name[print], parameter[constant[ Switch:]]] call[name[print], parameter[binary_operation[constant[ switch_state=%s] <ast.Mod object at 0x7da2590d6920> name[device].switch_state]]] if name[device].has_switch begin[:] call[name[print], parameter[constant[ Powermeter:]]] call[name[print], parameter[binary_operation[constant[ power=%s] <ast.Mod object at 0x7da2590d6920> name[device].power]]] call[name[print], parameter[binary_operation[constant[ energy=%s] <ast.Mod object at 0x7da2590d6920> name[device].energy]]] call[name[print], parameter[binary_operation[constant[ 
voltage=%s] <ast.Mod object at 0x7da2590d6920> name[device].voltage]]] if name[device].has_temperature_sensor begin[:] call[name[print], parameter[constant[ Temperature:]]] call[name[print], parameter[binary_operation[constant[ temperature=%s] <ast.Mod object at 0x7da2590d6920> name[device].temperature]]] call[name[print], parameter[binary_operation[constant[ offset=%s] <ast.Mod object at 0x7da2590d6920> name[device].offset]]] if name[device].has_thermostat begin[:] call[name[print], parameter[constant[ Thermostat:]]] call[name[print], parameter[binary_operation[constant[ battery_low=%s] <ast.Mod object at 0x7da2590d6920> name[device].battery_low]]] call[name[print], parameter[binary_operation[constant[ battery_level=%s] <ast.Mod object at 0x7da2590d6920> name[device].battery_level]]] call[name[print], parameter[binary_operation[constant[ actual=%s] <ast.Mod object at 0x7da2590d6920> name[device].actual_temperature]]] call[name[print], parameter[binary_operation[constant[ target=%s] <ast.Mod object at 0x7da2590d6920> name[device].target_temperature]]] call[name[print], parameter[binary_operation[constant[ comfort=%s] <ast.Mod object at 0x7da2590d6920> name[device].comfort_temperature]]] call[name[print], parameter[binary_operation[constant[ eco=%s] <ast.Mod object at 0x7da2590d6920> name[device].eco_temperature]]] call[name[print], parameter[binary_operation[constant[ window=%s] <ast.Mod object at 0x7da2590d6920> name[device].window_open]]] call[name[print], parameter[binary_operation[constant[ summer=%s] <ast.Mod object at 0x7da2590d6920> name[device].summer_active]]] call[name[print], parameter[binary_operation[constant[ holiday=%s] <ast.Mod object at 0x7da2590d6920> name[device].holiday_active]]] if name[device].has_alarm begin[:] call[name[print], parameter[constant[ Alert:]]] call[name[print], parameter[binary_operation[constant[ alert=%s] <ast.Mod object at 0x7da2590d6920> name[device].alert_state]]]
keyword[def] identifier[list_all] ( identifier[fritz] , identifier[args] ): literal[string] identifier[devices] = identifier[fritz] . identifier[get_devices] () keyword[for] identifier[device] keyword[in] identifier[devices] : identifier[print] ( literal[string] * literal[int] ) identifier[print] ( literal[string] % identifier[device] . identifier[name] ) identifier[print] ( literal[string] % identifier[device] . identifier[ain] ) identifier[print] ( literal[string] % identifier[device] . identifier[identifier] ) identifier[print] ( literal[string] % identifier[device] . identifier[productname] ) identifier[print] ( literal[string] % identifier[device] . identifier[manufacturer] ) identifier[print] ( literal[string] % identifier[device] . identifier[present] ) identifier[print] ( literal[string] % identifier[device] . identifier[lock] ) identifier[print] ( literal[string] % identifier[device] . identifier[device_lock] ) keyword[if] identifier[device] . identifier[present] keyword[is] keyword[False] : keyword[continue] keyword[if] identifier[device] . identifier[has_switch] : identifier[print] ( literal[string] ) identifier[print] ( literal[string] % identifier[device] . identifier[switch_state] ) keyword[if] identifier[device] . identifier[has_switch] : identifier[print] ( literal[string] ) identifier[print] ( literal[string] % identifier[device] . identifier[power] ) identifier[print] ( literal[string] % identifier[device] . identifier[energy] ) identifier[print] ( literal[string] % identifier[device] . identifier[voltage] ) keyword[if] identifier[device] . identifier[has_temperature_sensor] : identifier[print] ( literal[string] ) identifier[print] ( literal[string] % identifier[device] . identifier[temperature] ) identifier[print] ( literal[string] % identifier[device] . identifier[offset] ) keyword[if] identifier[device] . identifier[has_thermostat] : identifier[print] ( literal[string] ) identifier[print] ( literal[string] % identifier[device] . 
identifier[battery_low] ) identifier[print] ( literal[string] % identifier[device] . identifier[battery_level] ) identifier[print] ( literal[string] % identifier[device] . identifier[actual_temperature] ) identifier[print] ( literal[string] % identifier[device] . identifier[target_temperature] ) identifier[print] ( literal[string] % identifier[device] . identifier[comfort_temperature] ) identifier[print] ( literal[string] % identifier[device] . identifier[eco_temperature] ) identifier[print] ( literal[string] % identifier[device] . identifier[window_open] ) identifier[print] ( literal[string] % identifier[device] . identifier[summer_active] ) identifier[print] ( literal[string] % identifier[device] . identifier[holiday_active] ) keyword[if] identifier[device] . identifier[has_alarm] : identifier[print] ( literal[string] ) identifier[print] ( literal[string] % identifier[device] . identifier[alert_state] )
def list_all(fritz, args): """Command that prints all device information.""" devices = fritz.get_devices() for device in devices: print('#' * 30) print('name=%s' % device.name) print(' ain=%s' % device.ain) print(' id=%s' % device.identifier) print(' productname=%s' % device.productname) print(' manufacturer=%s' % device.manufacturer) print(' present=%s' % device.present) print(' lock=%s' % device.lock) print(' devicelock=%s' % device.device_lock) if device.present is False: continue # depends on [control=['if'], data=[]] if device.has_switch: print(' Switch:') print(' switch_state=%s' % device.switch_state) # depends on [control=['if'], data=[]] if device.has_switch: print(' Powermeter:') print(' power=%s' % device.power) print(' energy=%s' % device.energy) print(' voltage=%s' % device.voltage) # depends on [control=['if'], data=[]] if device.has_temperature_sensor: print(' Temperature:') print(' temperature=%s' % device.temperature) print(' offset=%s' % device.offset) # depends on [control=['if'], data=[]] if device.has_thermostat: print(' Thermostat:') print(' battery_low=%s' % device.battery_low) print(' battery_level=%s' % device.battery_level) print(' actual=%s' % device.actual_temperature) print(' target=%s' % device.target_temperature) print(' comfort=%s' % device.comfort_temperature) print(' eco=%s' % device.eco_temperature) print(' window=%s' % device.window_open) print(' summer=%s' % device.summer_active) print(' holiday=%s' % device.holiday_active) # depends on [control=['if'], data=[]] if device.has_alarm: print(' Alert:') print(' alert=%s' % device.alert_state) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['device']]
def extend_env(extra_env): """ Copies and extends the current environment with the values present in `extra_env`. """ env = os.environ.copy() env.update(extra_env) return env
def function[extend_env, parameter[extra_env]]: constant[ Copies and extends the current environment with the values present in `extra_env`. ] variable[env] assign[=] call[name[os].environ.copy, parameter[]] call[name[env].update, parameter[name[extra_env]]] return[name[env]]
keyword[def] identifier[extend_env] ( identifier[extra_env] ): literal[string] identifier[env] = identifier[os] . identifier[environ] . identifier[copy] () identifier[env] . identifier[update] ( identifier[extra_env] ) keyword[return] identifier[env]
def extend_env(extra_env): """ Copies and extends the current environment with the values present in `extra_env`. """ env = os.environ.copy() env.update(extra_env) return env
def common_criteria(**common): """ Wrap a function to always call with the given ``common`` named parameters. :property common: criteria common to your function call :return: decorator function :rtype: :class:`function` .. doctest:: >>> import cqparts >>> from cqparts.search import register, search, find >>> from cqparts.search import common_criteria >>> # Somebody elses (boring) library may register with... >>> @register(a='one', b='two') ... class BoringThing(cqparts.Part): ... pass >>> # But your library is awesome; only registering with unique criteria... >>> lib_criteria = { ... 'author': 'your_name', ... 'libname': 'awesome_things', ... } >>> awesome_register = common_criteria(**lib_criteria)(register) >>> @awesome_register(a='one', b='two') # identical to BoringThing ... class AwesomeThing(cqparts.Part): ... pass >>> # So lets try a search >>> len(search(a='one', b='two')) # doctest: +SKIP 2 >>> # oops, that returned both classes >>> # To narrow it down, we add something unique: >>> len(search(a='one', b='two', libname='awesome_things')) # finds only yours # doctest: +SKIP 1 >>> # or, we could use common_criteria again... >>> awesome_search = common_criteria(**lib_criteria)(search) >>> awesome_find = common_criteria(**lib_criteria)(find) >>> len(awesome_search(a='one', b='two')) # doctest: +SKIP 1 >>> awesome_find(a='one', b='two').__name__ 'AwesomeThing' A good universal way to apply unique criteria is with .. testcode:: import cadquery, cqparts from cqparts.search import register, common_criteria _register = common_criteria(module=__name__)(register) @_register(shape='cube', scale='unit') class Cube(cqparts.Part): # just an example... def make(self): return cadquery.Workplane('XY').box(1, 1, 1) """ def decorator(func): def inner(*args, **kwargs): merged_kwargs = copy(common) merged_kwargs.update(kwargs) return func(*args, **merged_kwargs) return inner return decorator
def function[common_criteria, parameter[]]: constant[ Wrap a function to always call with the given ``common`` named parameters. :property common: criteria common to your function call :return: decorator function :rtype: :class:`function` .. doctest:: >>> import cqparts >>> from cqparts.search import register, search, find >>> from cqparts.search import common_criteria >>> # Somebody elses (boring) library may register with... >>> @register(a='one', b='two') ... class BoringThing(cqparts.Part): ... pass >>> # But your library is awesome; only registering with unique criteria... >>> lib_criteria = { ... 'author': 'your_name', ... 'libname': 'awesome_things', ... } >>> awesome_register = common_criteria(**lib_criteria)(register) >>> @awesome_register(a='one', b='two') # identical to BoringThing ... class AwesomeThing(cqparts.Part): ... pass >>> # So lets try a search >>> len(search(a='one', b='two')) # doctest: +SKIP 2 >>> # oops, that returned both classes >>> # To narrow it down, we add something unique: >>> len(search(a='one', b='two', libname='awesome_things')) # finds only yours # doctest: +SKIP 1 >>> # or, we could use common_criteria again... >>> awesome_search = common_criteria(**lib_criteria)(search) >>> awesome_find = common_criteria(**lib_criteria)(find) >>> len(awesome_search(a='one', b='two')) # doctest: +SKIP 1 >>> awesome_find(a='one', b='two').__name__ 'AwesomeThing' A good universal way to apply unique criteria is with .. testcode:: import cadquery, cqparts from cqparts.search import register, common_criteria _register = common_criteria(module=__name__)(register) @_register(shape='cube', scale='unit') class Cube(cqparts.Part): # just an example... 
def make(self): return cadquery.Workplane('XY').box(1, 1, 1) ] def function[decorator, parameter[func]]: def function[inner, parameter[]]: variable[merged_kwargs] assign[=] call[name[copy], parameter[name[common]]] call[name[merged_kwargs].update, parameter[name[kwargs]]] return[call[name[func], parameter[<ast.Starred object at 0x7da18f810ee0>]]] return[name[inner]] return[name[decorator]]
keyword[def] identifier[common_criteria] (** identifier[common] ): literal[string] keyword[def] identifier[decorator] ( identifier[func] ): keyword[def] identifier[inner] (* identifier[args] ,** identifier[kwargs] ): identifier[merged_kwargs] = identifier[copy] ( identifier[common] ) identifier[merged_kwargs] . identifier[update] ( identifier[kwargs] ) keyword[return] identifier[func] (* identifier[args] ,** identifier[merged_kwargs] ) keyword[return] identifier[inner] keyword[return] identifier[decorator]
def common_criteria(**common): """ Wrap a function to always call with the given ``common`` named parameters. :property common: criteria common to your function call :return: decorator function :rtype: :class:`function` .. doctest:: >>> import cqparts >>> from cqparts.search import register, search, find >>> from cqparts.search import common_criteria >>> # Somebody elses (boring) library may register with... >>> @register(a='one', b='two') ... class BoringThing(cqparts.Part): ... pass >>> # But your library is awesome; only registering with unique criteria... >>> lib_criteria = { ... 'author': 'your_name', ... 'libname': 'awesome_things', ... } >>> awesome_register = common_criteria(**lib_criteria)(register) >>> @awesome_register(a='one', b='two') # identical to BoringThing ... class AwesomeThing(cqparts.Part): ... pass >>> # So lets try a search >>> len(search(a='one', b='two')) # doctest: +SKIP 2 >>> # oops, that returned both classes >>> # To narrow it down, we add something unique: >>> len(search(a='one', b='two', libname='awesome_things')) # finds only yours # doctest: +SKIP 1 >>> # or, we could use common_criteria again... >>> awesome_search = common_criteria(**lib_criteria)(search) >>> awesome_find = common_criteria(**lib_criteria)(find) >>> len(awesome_search(a='one', b='two')) # doctest: +SKIP 1 >>> awesome_find(a='one', b='two').__name__ 'AwesomeThing' A good universal way to apply unique criteria is with .. testcode:: import cadquery, cqparts from cqparts.search import register, common_criteria _register = common_criteria(module=__name__)(register) @_register(shape='cube', scale='unit') class Cube(cqparts.Part): # just an example... def make(self): return cadquery.Workplane('XY').box(1, 1, 1) """ def decorator(func): def inner(*args, **kwargs): merged_kwargs = copy(common) merged_kwargs.update(kwargs) return func(*args, **merged_kwargs) return inner return decorator
def inspect(orm_class, attribute_name): """ :param attribute_name: name of the mapped attribute to inspect. :returns: list of 2-tuples containing information about the inspected attribute (first element: mapped entity attribute kind; second attribute: mapped entity attribute) """ key = (orm_class, attribute_name) elems = OrmAttributeInspector.__cache.get(key) if elems is None: elems = OrmAttributeInspector.__inspect(key) OrmAttributeInspector.__cache[key] = elems return elems
def function[inspect, parameter[orm_class, attribute_name]]: constant[ :param attribute_name: name of the mapped attribute to inspect. :returns: list of 2-tuples containing information about the inspected attribute (first element: mapped entity attribute kind; second attribute: mapped entity attribute) ] variable[key] assign[=] tuple[[<ast.Name object at 0x7da1b27e11e0>, <ast.Name object at 0x7da1b27e3970>]] variable[elems] assign[=] call[name[OrmAttributeInspector].__cache.get, parameter[name[key]]] if compare[name[elems] is constant[None]] begin[:] variable[elems] assign[=] call[name[OrmAttributeInspector].__inspect, parameter[name[key]]] call[name[OrmAttributeInspector].__cache][name[key]] assign[=] name[elems] return[name[elems]]
keyword[def] identifier[inspect] ( identifier[orm_class] , identifier[attribute_name] ): literal[string] identifier[key] =( identifier[orm_class] , identifier[attribute_name] ) identifier[elems] = identifier[OrmAttributeInspector] . identifier[__cache] . identifier[get] ( identifier[key] ) keyword[if] identifier[elems] keyword[is] keyword[None] : identifier[elems] = identifier[OrmAttributeInspector] . identifier[__inspect] ( identifier[key] ) identifier[OrmAttributeInspector] . identifier[__cache] [ identifier[key] ]= identifier[elems] keyword[return] identifier[elems]
def inspect(orm_class, attribute_name): """ :param attribute_name: name of the mapped attribute to inspect. :returns: list of 2-tuples containing information about the inspected attribute (first element: mapped entity attribute kind; second attribute: mapped entity attribute) """ key = (orm_class, attribute_name) elems = OrmAttributeInspector.__cache.get(key) if elems is None: elems = OrmAttributeInspector.__inspect(key) OrmAttributeInspector.__cache[key] = elems # depends on [control=['if'], data=['elems']] return elems
def setOverlayFromFile(self, ulOverlayHandle, pchFilePath): """ Separate interface for providing the image through a filename: can be png or jpg, and should not be bigger than 1920x1080. This function can only be called by the overlay's renderer process """ fn = self.function_table.setOverlayFromFile result = fn(ulOverlayHandle, pchFilePath) return result
def function[setOverlayFromFile, parameter[self, ulOverlayHandle, pchFilePath]]: constant[ Separate interface for providing the image through a filename: can be png or jpg, and should not be bigger than 1920x1080. This function can only be called by the overlay's renderer process ] variable[fn] assign[=] name[self].function_table.setOverlayFromFile variable[result] assign[=] call[name[fn], parameter[name[ulOverlayHandle], name[pchFilePath]]] return[name[result]]
keyword[def] identifier[setOverlayFromFile] ( identifier[self] , identifier[ulOverlayHandle] , identifier[pchFilePath] ): literal[string] identifier[fn] = identifier[self] . identifier[function_table] . identifier[setOverlayFromFile] identifier[result] = identifier[fn] ( identifier[ulOverlayHandle] , identifier[pchFilePath] ) keyword[return] identifier[result]
def setOverlayFromFile(self, ulOverlayHandle, pchFilePath): """ Separate interface for providing the image through a filename: can be png or jpg, and should not be bigger than 1920x1080. This function can only be called by the overlay's renderer process """ fn = self.function_table.setOverlayFromFile result = fn(ulOverlayHandle, pchFilePath) return result
def serialize(obj): """Serialize the given object into JSON. Args: obj: the object to be serialized. Returns: (str): JSON representation of the given object. """ LOGGER.debug('serialize(%s)', obj) if isinstance(obj, datetime.date): return simplejson.dumps(obj, default=encoders.as_date) elif hasattr(obj, '__dict__'): return simplejson.dumps(obj, default=encoders.as_object) return simplejson.dumps(obj)
def function[serialize, parameter[obj]]: constant[Serialize the given object into JSON. Args: obj: the object to be serialized. Returns: (str): JSON representation of the given object. ] call[name[LOGGER].debug, parameter[constant[serialize(%s)], name[obj]]] if call[name[isinstance], parameter[name[obj], name[datetime].date]] begin[:] return[call[name[simplejson].dumps, parameter[name[obj]]]] return[call[name[simplejson].dumps, parameter[name[obj]]]]
keyword[def] identifier[serialize] ( identifier[obj] ): literal[string] identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[obj] ) keyword[if] identifier[isinstance] ( identifier[obj] , identifier[datetime] . identifier[date] ): keyword[return] identifier[simplejson] . identifier[dumps] ( identifier[obj] , identifier[default] = identifier[encoders] . identifier[as_date] ) keyword[elif] identifier[hasattr] ( identifier[obj] , literal[string] ): keyword[return] identifier[simplejson] . identifier[dumps] ( identifier[obj] , identifier[default] = identifier[encoders] . identifier[as_object] ) keyword[return] identifier[simplejson] . identifier[dumps] ( identifier[obj] )
def serialize(obj): """Serialize the given object into JSON. Args: obj: the object to be serialized. Returns: (str): JSON representation of the given object. """ LOGGER.debug('serialize(%s)', obj) if isinstance(obj, datetime.date): return simplejson.dumps(obj, default=encoders.as_date) # depends on [control=['if'], data=[]] elif hasattr(obj, '__dict__'): return simplejson.dumps(obj, default=encoders.as_object) # depends on [control=['if'], data=[]] return simplejson.dumps(obj)
def get_phi(self, scalar=None): """Phi (φ) Coefficient -- lack of confusion Arguments: scalar (bool or None): Whether to return a scalar Phi coefficient (assume binary classification) rather than a multiclass vector Measure of the lack of confusion in a single value References: [MCC on wikipedia](https://en.wikipedia.org/wiki/Matthews_correlation_coefficient) [docs on R implementation](http://www.personality-project.org/r/html/phi.html) φ = (TP*TN - FP*FN) / sqrt((TP+FP) * (TP+FN) * (TN+FP) * (TN+FN)) mcc = (tp*tn - fp*fn) / sqrt((tp+fp) * (tp+fn) * (tn+fp) * (tn+fn)) """ # If requested, compute the phi coeffients for all possible 'positive' and 'negative' class labels # (multiclass problem) if ((not self._scalar_stats and not scalar and self._num_classes > 2) or ((scalar is False or self._scalar_stats is False) and self._num_classes > 1)): phi = PrettyDict() # count of predictions labeled with pred_label for a slice of data that was actually labeled true_label: # `count = self[pred_label][true_label]` for pos_label in self.columns: tp, tn, fp, fn = dataframe_tptnfpfn(self, pos_label=pos_label, labels=self.columns) phi[pos_label] = tptnfpfn_mcc(tp=tp, tn=tn, fp=fp, fn=fn) return pd.Series(phi) # A scalar phi value was requested, so compute it for the "inferred" positive classification class return tptnfpfn_mcc(self._tp, self._tn, self._fp, self._fn)
def function[get_phi, parameter[self, scalar]]: constant[Phi (φ) Coefficient -- lack of confusion Arguments: scalar (bool or None): Whether to return a scalar Phi coefficient (assume binary classification) rather than a multiclass vector Measure of the lack of confusion in a single value References: [MCC on wikipedia](https://en.wikipedia.org/wiki/Matthews_correlation_coefficient) [docs on R implementation](http://www.personality-project.org/r/html/phi.html) φ = (TP*TN - FP*FN) / sqrt((TP+FP) * (TP+FN) * (TN+FP) * (TN+FN)) mcc = (tp*tn - fp*fn) / sqrt((tp+fp) * (tp+fn) * (tn+fp) * (tn+fn)) ] if <ast.BoolOp object at 0x7da2047e8be0> begin[:] variable[phi] assign[=] call[name[PrettyDict], parameter[]] for taget[name[pos_label]] in starred[name[self].columns] begin[:] <ast.Tuple object at 0x7da2047e81c0> assign[=] call[name[dataframe_tptnfpfn], parameter[name[self]]] call[name[phi]][name[pos_label]] assign[=] call[name[tptnfpfn_mcc], parameter[]] return[call[name[pd].Series, parameter[name[phi]]]] return[call[name[tptnfpfn_mcc], parameter[name[self]._tp, name[self]._tn, name[self]._fp, name[self]._fn]]]
keyword[def] identifier[get_phi] ( identifier[self] , identifier[scalar] = keyword[None] ): literal[string] keyword[if] (( keyword[not] identifier[self] . identifier[_scalar_stats] keyword[and] keyword[not] identifier[scalar] keyword[and] identifier[self] . identifier[_num_classes] > literal[int] ) keyword[or] (( identifier[scalar] keyword[is] keyword[False] keyword[or] identifier[self] . identifier[_scalar_stats] keyword[is] keyword[False] ) keyword[and] identifier[self] . identifier[_num_classes] > literal[int] )): identifier[phi] = identifier[PrettyDict] () keyword[for] identifier[pos_label] keyword[in] identifier[self] . identifier[columns] : identifier[tp] , identifier[tn] , identifier[fp] , identifier[fn] = identifier[dataframe_tptnfpfn] ( identifier[self] , identifier[pos_label] = identifier[pos_label] , identifier[labels] = identifier[self] . identifier[columns] ) identifier[phi] [ identifier[pos_label] ]= identifier[tptnfpfn_mcc] ( identifier[tp] = identifier[tp] , identifier[tn] = identifier[tn] , identifier[fp] = identifier[fp] , identifier[fn] = identifier[fn] ) keyword[return] identifier[pd] . identifier[Series] ( identifier[phi] ) keyword[return] identifier[tptnfpfn_mcc] ( identifier[self] . identifier[_tp] , identifier[self] . identifier[_tn] , identifier[self] . identifier[_fp] , identifier[self] . identifier[_fn] )
def get_phi(self, scalar=None): """Phi (φ) Coefficient -- lack of confusion Arguments: scalar (bool or None): Whether to return a scalar Phi coefficient (assume binary classification) rather than a multiclass vector Measure of the lack of confusion in a single value References: [MCC on wikipedia](https://en.wikipedia.org/wiki/Matthews_correlation_coefficient) [docs on R implementation](http://www.personality-project.org/r/html/phi.html) φ = (TP*TN - FP*FN) / sqrt((TP+FP) * (TP+FN) * (TN+FP) * (TN+FN)) mcc = (tp*tn - fp*fn) / sqrt((tp+fp) * (tp+fn) * (tn+fp) * (tn+fn)) """ # If requested, compute the phi coeffients for all possible 'positive' and 'negative' class labels # (multiclass problem) if not self._scalar_stats and (not scalar) and (self._num_classes > 2) or ((scalar is False or self._scalar_stats is False) and self._num_classes > 1): phi = PrettyDict() # count of predictions labeled with pred_label for a slice of data that was actually labeled true_label: # `count = self[pred_label][true_label]` for pos_label in self.columns: (tp, tn, fp, fn) = dataframe_tptnfpfn(self, pos_label=pos_label, labels=self.columns) phi[pos_label] = tptnfpfn_mcc(tp=tp, tn=tn, fp=fp, fn=fn) # depends on [control=['for'], data=['pos_label']] return pd.Series(phi) # depends on [control=['if'], data=[]] # A scalar phi value was requested, so compute it for the "inferred" positive classification class return tptnfpfn_mcc(self._tp, self._tn, self._fp, self._fn)
def eval_objfn(self): """Compute components of objective function as well as total contribution to objective function. """ fval = self.obfn_f() gval = self.obfn_g(self.obfn_gvar()) obj = fval + gval return (obj, fval, gval)
def function[eval_objfn, parameter[self]]: constant[Compute components of objective function as well as total contribution to objective function. ] variable[fval] assign[=] call[name[self].obfn_f, parameter[]] variable[gval] assign[=] call[name[self].obfn_g, parameter[call[name[self].obfn_gvar, parameter[]]]] variable[obj] assign[=] binary_operation[name[fval] + name[gval]] return[tuple[[<ast.Name object at 0x7da1b08d8340>, <ast.Name object at 0x7da1b08db460>, <ast.Name object at 0x7da1b08dabf0>]]]
keyword[def] identifier[eval_objfn] ( identifier[self] ): literal[string] identifier[fval] = identifier[self] . identifier[obfn_f] () identifier[gval] = identifier[self] . identifier[obfn_g] ( identifier[self] . identifier[obfn_gvar] ()) identifier[obj] = identifier[fval] + identifier[gval] keyword[return] ( identifier[obj] , identifier[fval] , identifier[gval] )
def eval_objfn(self): """Compute components of objective function as well as total contribution to objective function. """ fval = self.obfn_f() gval = self.obfn_g(self.obfn_gvar()) obj = fval + gval return (obj, fval, gval)
def validate(payload, schema): """Validate `payload` against `schema`, returning an error list. jsonschema provides lots of information in it's errors, but it can be a bit of work to extract all the information. """ v = jsonschema.Draft4Validator( schema, format_checker=jsonschema.FormatChecker()) error_list = [] for error in v.iter_errors(payload): message = error.message location = '/' + '/'.join([str(c) for c in error.absolute_path]) error_list.append(message + ' at ' + location) return error_list
def function[validate, parameter[payload, schema]]: constant[Validate `payload` against `schema`, returning an error list. jsonschema provides lots of information in it's errors, but it can be a bit of work to extract all the information. ] variable[v] assign[=] call[name[jsonschema].Draft4Validator, parameter[name[schema]]] variable[error_list] assign[=] list[[]] for taget[name[error]] in starred[call[name[v].iter_errors, parameter[name[payload]]]] begin[:] variable[message] assign[=] name[error].message variable[location] assign[=] binary_operation[constant[/] + call[constant[/].join, parameter[<ast.ListComp object at 0x7da1b10607c0>]]] call[name[error_list].append, parameter[binary_operation[binary_operation[name[message] + constant[ at ]] + name[location]]]] return[name[error_list]]
keyword[def] identifier[validate] ( identifier[payload] , identifier[schema] ): literal[string] identifier[v] = identifier[jsonschema] . identifier[Draft4Validator] ( identifier[schema] , identifier[format_checker] = identifier[jsonschema] . identifier[FormatChecker] ()) identifier[error_list] =[] keyword[for] identifier[error] keyword[in] identifier[v] . identifier[iter_errors] ( identifier[payload] ): identifier[message] = identifier[error] . identifier[message] identifier[location] = literal[string] + literal[string] . identifier[join] ([ identifier[str] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[error] . identifier[absolute_path] ]) identifier[error_list] . identifier[append] ( identifier[message] + literal[string] + identifier[location] ) keyword[return] identifier[error_list]
def validate(payload, schema): """Validate `payload` against `schema`, returning an error list. jsonschema provides lots of information in it's errors, but it can be a bit of work to extract all the information. """ v = jsonschema.Draft4Validator(schema, format_checker=jsonschema.FormatChecker()) error_list = [] for error in v.iter_errors(payload): message = error.message location = '/' + '/'.join([str(c) for c in error.absolute_path]) error_list.append(message + ' at ' + location) # depends on [control=['for'], data=['error']] return error_list
def neighborhood(self, node, degree=4): """Am I really handcoding graph traversal please no""" assert self.by_name[node.name] == node already_visited = frontier = set([node.name]) for _ in range(degree): neighbor_names = set() for node_name in frontier: outgoing = set(n.name for n in self.by_input[node_name]) incoming = set(self.by_name[node_name].input) neighbor_names |= incoming | outgoing frontier = neighbor_names - already_visited already_visited |= neighbor_names return [self.by_name[name] for name in already_visited]
def function[neighborhood, parameter[self, node, degree]]: constant[Am I really handcoding graph traversal please no] assert[compare[call[name[self].by_name][name[node].name] equal[==] name[node]]] variable[already_visited] assign[=] call[name[set], parameter[list[[<ast.Attribute object at 0x7da1b1f88160>]]]] for taget[name[_]] in starred[call[name[range], parameter[name[degree]]]] begin[:] variable[neighbor_names] assign[=] call[name[set], parameter[]] for taget[name[node_name]] in starred[name[frontier]] begin[:] variable[outgoing] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b1f8aec0>]] variable[incoming] assign[=] call[name[set], parameter[call[name[self].by_name][name[node_name]].input]] <ast.AugAssign object at 0x7da1b1f89570> variable[frontier] assign[=] binary_operation[name[neighbor_names] - name[already_visited]] <ast.AugAssign object at 0x7da1b1f8ad10> return[<ast.ListComp object at 0x7da1b1f8b8b0>]
keyword[def] identifier[neighborhood] ( identifier[self] , identifier[node] , identifier[degree] = literal[int] ): literal[string] keyword[assert] identifier[self] . identifier[by_name] [ identifier[node] . identifier[name] ]== identifier[node] identifier[already_visited] = identifier[frontier] = identifier[set] ([ identifier[node] . identifier[name] ]) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[degree] ): identifier[neighbor_names] = identifier[set] () keyword[for] identifier[node_name] keyword[in] identifier[frontier] : identifier[outgoing] = identifier[set] ( identifier[n] . identifier[name] keyword[for] identifier[n] keyword[in] identifier[self] . identifier[by_input] [ identifier[node_name] ]) identifier[incoming] = identifier[set] ( identifier[self] . identifier[by_name] [ identifier[node_name] ]. identifier[input] ) identifier[neighbor_names] |= identifier[incoming] | identifier[outgoing] identifier[frontier] = identifier[neighbor_names] - identifier[already_visited] identifier[already_visited] |= identifier[neighbor_names] keyword[return] [ identifier[self] . identifier[by_name] [ identifier[name] ] keyword[for] identifier[name] keyword[in] identifier[already_visited] ]
def neighborhood(self, node, degree=4): """Am I really handcoding graph traversal please no""" assert self.by_name[node.name] == node already_visited = frontier = set([node.name]) for _ in range(degree): neighbor_names = set() for node_name in frontier: outgoing = set((n.name for n in self.by_input[node_name])) incoming = set(self.by_name[node_name].input) neighbor_names |= incoming | outgoing # depends on [control=['for'], data=['node_name']] frontier = neighbor_names - already_visited already_visited |= neighbor_names # depends on [control=['for'], data=[]] return [self.by_name[name] for name in already_visited]
def post_account(self, headers=None, query=None, cdn=False, body=None): """ POSTs the account and returns the results. This is usually done to set X-Account-Meta-xxx headers. Note that any existing X-Account-Meta-xxx headers will remain untouched. To remove an X-Account-Meta-xxx header, send the header with an empty string as its value. :param headers: Additional headers to send with the request. :param query: Set to a dict of query values to send on the query string of the request. :param cdn: If set True, the CDN management interface will be used. :param body: No known Swift POSTs take a body; but the option is there for the future. :returns: A tuple of (status, reason, headers, contents). :status: is an int for the HTTP status code. :reason: is the str for the HTTP status (ex: "Ok"). :headers: is a dict with all lowercase keys of the HTTP headers; if a header has multiple values, it will be a list. :contents: is the str for the HTTP body. """ return self.request( 'POST', '', body or '', headers, query=query, cdn=cdn)
def function[post_account, parameter[self, headers, query, cdn, body]]: constant[ POSTs the account and returns the results. This is usually done to set X-Account-Meta-xxx headers. Note that any existing X-Account-Meta-xxx headers will remain untouched. To remove an X-Account-Meta-xxx header, send the header with an empty string as its value. :param headers: Additional headers to send with the request. :param query: Set to a dict of query values to send on the query string of the request. :param cdn: If set True, the CDN management interface will be used. :param body: No known Swift POSTs take a body; but the option is there for the future. :returns: A tuple of (status, reason, headers, contents). :status: is an int for the HTTP status code. :reason: is the str for the HTTP status (ex: "Ok"). :headers: is a dict with all lowercase keys of the HTTP headers; if a header has multiple values, it will be a list. :contents: is the str for the HTTP body. ] return[call[name[self].request, parameter[constant[POST], constant[], <ast.BoolOp object at 0x7da1b02a4df0>, name[headers]]]]
keyword[def] identifier[post_account] ( identifier[self] , identifier[headers] = keyword[None] , identifier[query] = keyword[None] , identifier[cdn] = keyword[False] , identifier[body] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[request] ( literal[string] , literal[string] , identifier[body] keyword[or] literal[string] , identifier[headers] , identifier[query] = identifier[query] , identifier[cdn] = identifier[cdn] )
def post_account(self, headers=None, query=None, cdn=False, body=None): """ POSTs the account and returns the results. This is usually done to set X-Account-Meta-xxx headers. Note that any existing X-Account-Meta-xxx headers will remain untouched. To remove an X-Account-Meta-xxx header, send the header with an empty string as its value. :param headers: Additional headers to send with the request. :param query: Set to a dict of query values to send on the query string of the request. :param cdn: If set True, the CDN management interface will be used. :param body: No known Swift POSTs take a body; but the option is there for the future. :returns: A tuple of (status, reason, headers, contents). :status: is an int for the HTTP status code. :reason: is the str for the HTTP status (ex: "Ok"). :headers: is a dict with all lowercase keys of the HTTP headers; if a header has multiple values, it will be a list. :contents: is the str for the HTTP body. """ return self.request('POST', '', body or '', headers, query=query, cdn=cdn)
def save_configuration_to_hdf5(register, configuration_file, name=''): '''Saving configuration to HDF5 file from register object Parameters ---------- register : pybar.fei4.register object configuration_file : string, file Filename of the HDF5 configuration file or file object. name : string Additional identifier (subgroup). Useful when storing more than one configuration inside a HDF5 file. ''' def save_conf(): logging.info("Saving configuration: %s" % h5_file.filename) register.configuration_file = h5_file.filename try: configuration_group = h5_file.create_group(h5_file.root, "configuration") except tb.NodeError: configuration_group = h5_file.root.configuration if name: try: configuration_group = h5_file.create_group(configuration_group, name) except tb.NodeError: configuration_group = h5_file.root.configuration.name # calibration_parameters try: h5_file.remove_node(configuration_group, name='calibration_parameters') except tb.NodeError: pass calibration_data_table = h5_file.create_table(configuration_group, name='calibration_parameters', description=NameValue, title='calibration_parameters') calibration_data_row = calibration_data_table.row for key, value in register.calibration_parameters.iteritems(): calibration_data_row['name'] = key calibration_data_row['value'] = str(value) calibration_data_row.append() calibration_data_table.flush() # miscellaneous try: h5_file.remove_node(configuration_group, name='miscellaneous') except tb.NodeError: pass miscellaneous_data_table = h5_file.create_table(configuration_group, name='miscellaneous', description=NameValue, title='miscellaneous') miscellaneous_data_row = miscellaneous_data_table.row miscellaneous_data_row['name'] = 'Flavor' miscellaneous_data_row['value'] = register.flavor miscellaneous_data_row.append() miscellaneous_data_row['name'] = 'Chip_ID' miscellaneous_data_row['value'] = register.chip_id miscellaneous_data_row.append() for key, value in register.miscellaneous.iteritems(): miscellaneous_data_row['name'] 
= key miscellaneous_data_row['value'] = value miscellaneous_data_row.append() miscellaneous_data_table.flush() # global try: h5_file.remove_node(configuration_group, name='global_register') except tb.NodeError: pass global_data_table = h5_file.create_table(configuration_group, name='global_register', description=NameValue, title='global_register') global_data_table_row = global_data_table.row global_regs = register.get_global_register_objects(readonly=False) for global_reg in sorted(global_regs, key=itemgetter('name')): global_data_table_row['name'] = global_reg['name'] global_data_table_row['value'] = global_reg['value'] # TODO: some function that converts to bin, hex global_data_table_row.append() global_data_table.flush() # pixel for pixel_reg in register.pixel_registers.itervalues(): try: h5_file.remove_node(configuration_group, name=pixel_reg['name']) except tb.NodeError: pass data = pixel_reg['value'].T atom = tb.Atom.from_dtype(data.dtype) ds = h5_file.create_carray(configuration_group, name=pixel_reg['name'], atom=atom, shape=data.shape, title=pixel_reg['name']) ds[:] = data if isinstance(configuration_file, tb.file.File): h5_file = configuration_file save_conf() else: with tb.open_file(configuration_file, mode="a", title='') as h5_file: save_conf()
def function[save_configuration_to_hdf5, parameter[register, configuration_file, name]]: constant[Saving configuration to HDF5 file from register object Parameters ---------- register : pybar.fei4.register object configuration_file : string, file Filename of the HDF5 configuration file or file object. name : string Additional identifier (subgroup). Useful when storing more than one configuration inside a HDF5 file. ] def function[save_conf, parameter[]]: call[name[logging].info, parameter[binary_operation[constant[Saving configuration: %s] <ast.Mod object at 0x7da2590d6920> name[h5_file].filename]]] name[register].configuration_file assign[=] name[h5_file].filename <ast.Try object at 0x7da1b11a1a80> if name[name] begin[:] <ast.Try object at 0x7da1b11a3af0> <ast.Try object at 0x7da1b11a0790> variable[calibration_data_table] assign[=] call[name[h5_file].create_table, parameter[name[configuration_group]]] variable[calibration_data_row] assign[=] name[calibration_data_table].row for taget[tuple[[<ast.Name object at 0x7da1b11a3010>, <ast.Name object at 0x7da1b11a1db0>]]] in starred[call[name[register].calibration_parameters.iteritems, parameter[]]] begin[:] call[name[calibration_data_row]][constant[name]] assign[=] name[key] call[name[calibration_data_row]][constant[value]] assign[=] call[name[str], parameter[name[value]]] call[name[calibration_data_row].append, parameter[]] call[name[calibration_data_table].flush, parameter[]] <ast.Try object at 0x7da1b11a1450> variable[miscellaneous_data_table] assign[=] call[name[h5_file].create_table, parameter[name[configuration_group]]] variable[miscellaneous_data_row] assign[=] name[miscellaneous_data_table].row call[name[miscellaneous_data_row]][constant[name]] assign[=] constant[Flavor] call[name[miscellaneous_data_row]][constant[value]] assign[=] name[register].flavor call[name[miscellaneous_data_row].append, parameter[]] call[name[miscellaneous_data_row]][constant[name]] assign[=] constant[Chip_ID] 
call[name[miscellaneous_data_row]][constant[value]] assign[=] name[register].chip_id call[name[miscellaneous_data_row].append, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b11a1b40>, <ast.Name object at 0x7da1b11a00a0>]]] in starred[call[name[register].miscellaneous.iteritems, parameter[]]] begin[:] call[name[miscellaneous_data_row]][constant[name]] assign[=] name[key] call[name[miscellaneous_data_row]][constant[value]] assign[=] name[value] call[name[miscellaneous_data_row].append, parameter[]] call[name[miscellaneous_data_table].flush, parameter[]] <ast.Try object at 0x7da1b11a22c0> variable[global_data_table] assign[=] call[name[h5_file].create_table, parameter[name[configuration_group]]] variable[global_data_table_row] assign[=] name[global_data_table].row variable[global_regs] assign[=] call[name[register].get_global_register_objects, parameter[]] for taget[name[global_reg]] in starred[call[name[sorted], parameter[name[global_regs]]]] begin[:] call[name[global_data_table_row]][constant[name]] assign[=] call[name[global_reg]][constant[name]] call[name[global_data_table_row]][constant[value]] assign[=] call[name[global_reg]][constant[value]] call[name[global_data_table_row].append, parameter[]] call[name[global_data_table].flush, parameter[]] for taget[name[pixel_reg]] in starred[call[name[register].pixel_registers.itervalues, parameter[]]] begin[:] <ast.Try object at 0x7da1b11a2290> variable[data] assign[=] call[name[pixel_reg]][constant[value]].T variable[atom] assign[=] call[name[tb].Atom.from_dtype, parameter[name[data].dtype]] variable[ds] assign[=] call[name[h5_file].create_carray, parameter[name[configuration_group]]] call[name[ds]][<ast.Slice object at 0x7da1b11e2590>] assign[=] name[data] if call[name[isinstance], parameter[name[configuration_file], name[tb].file.File]] begin[:] variable[h5_file] assign[=] name[configuration_file] call[name[save_conf], parameter[]]
keyword[def] identifier[save_configuration_to_hdf5] ( identifier[register] , identifier[configuration_file] , identifier[name] = literal[string] ): literal[string] keyword[def] identifier[save_conf] (): identifier[logging] . identifier[info] ( literal[string] % identifier[h5_file] . identifier[filename] ) identifier[register] . identifier[configuration_file] = identifier[h5_file] . identifier[filename] keyword[try] : identifier[configuration_group] = identifier[h5_file] . identifier[create_group] ( identifier[h5_file] . identifier[root] , literal[string] ) keyword[except] identifier[tb] . identifier[NodeError] : identifier[configuration_group] = identifier[h5_file] . identifier[root] . identifier[configuration] keyword[if] identifier[name] : keyword[try] : identifier[configuration_group] = identifier[h5_file] . identifier[create_group] ( identifier[configuration_group] , identifier[name] ) keyword[except] identifier[tb] . identifier[NodeError] : identifier[configuration_group] = identifier[h5_file] . identifier[root] . identifier[configuration] . identifier[name] keyword[try] : identifier[h5_file] . identifier[remove_node] ( identifier[configuration_group] , identifier[name] = literal[string] ) keyword[except] identifier[tb] . identifier[NodeError] : keyword[pass] identifier[calibration_data_table] = identifier[h5_file] . identifier[create_table] ( identifier[configuration_group] , identifier[name] = literal[string] , identifier[description] = identifier[NameValue] , identifier[title] = literal[string] ) identifier[calibration_data_row] = identifier[calibration_data_table] . identifier[row] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[register] . identifier[calibration_parameters] . identifier[iteritems] (): identifier[calibration_data_row] [ literal[string] ]= identifier[key] identifier[calibration_data_row] [ literal[string] ]= identifier[str] ( identifier[value] ) identifier[calibration_data_row] . 
identifier[append] () identifier[calibration_data_table] . identifier[flush] () keyword[try] : identifier[h5_file] . identifier[remove_node] ( identifier[configuration_group] , identifier[name] = literal[string] ) keyword[except] identifier[tb] . identifier[NodeError] : keyword[pass] identifier[miscellaneous_data_table] = identifier[h5_file] . identifier[create_table] ( identifier[configuration_group] , identifier[name] = literal[string] , identifier[description] = identifier[NameValue] , identifier[title] = literal[string] ) identifier[miscellaneous_data_row] = identifier[miscellaneous_data_table] . identifier[row] identifier[miscellaneous_data_row] [ literal[string] ]= literal[string] identifier[miscellaneous_data_row] [ literal[string] ]= identifier[register] . identifier[flavor] identifier[miscellaneous_data_row] . identifier[append] () identifier[miscellaneous_data_row] [ literal[string] ]= literal[string] identifier[miscellaneous_data_row] [ literal[string] ]= identifier[register] . identifier[chip_id] identifier[miscellaneous_data_row] . identifier[append] () keyword[for] identifier[key] , identifier[value] keyword[in] identifier[register] . identifier[miscellaneous] . identifier[iteritems] (): identifier[miscellaneous_data_row] [ literal[string] ]= identifier[key] identifier[miscellaneous_data_row] [ literal[string] ]= identifier[value] identifier[miscellaneous_data_row] . identifier[append] () identifier[miscellaneous_data_table] . identifier[flush] () keyword[try] : identifier[h5_file] . identifier[remove_node] ( identifier[configuration_group] , identifier[name] = literal[string] ) keyword[except] identifier[tb] . identifier[NodeError] : keyword[pass] identifier[global_data_table] = identifier[h5_file] . identifier[create_table] ( identifier[configuration_group] , identifier[name] = literal[string] , identifier[description] = identifier[NameValue] , identifier[title] = literal[string] ) identifier[global_data_table_row] = identifier[global_data_table] . 
identifier[row] identifier[global_regs] = identifier[register] . identifier[get_global_register_objects] ( identifier[readonly] = keyword[False] ) keyword[for] identifier[global_reg] keyword[in] identifier[sorted] ( identifier[global_regs] , identifier[key] = identifier[itemgetter] ( literal[string] )): identifier[global_data_table_row] [ literal[string] ]= identifier[global_reg] [ literal[string] ] identifier[global_data_table_row] [ literal[string] ]= identifier[global_reg] [ literal[string] ] identifier[global_data_table_row] . identifier[append] () identifier[global_data_table] . identifier[flush] () keyword[for] identifier[pixel_reg] keyword[in] identifier[register] . identifier[pixel_registers] . identifier[itervalues] (): keyword[try] : identifier[h5_file] . identifier[remove_node] ( identifier[configuration_group] , identifier[name] = identifier[pixel_reg] [ literal[string] ]) keyword[except] identifier[tb] . identifier[NodeError] : keyword[pass] identifier[data] = identifier[pixel_reg] [ literal[string] ]. identifier[T] identifier[atom] = identifier[tb] . identifier[Atom] . identifier[from_dtype] ( identifier[data] . identifier[dtype] ) identifier[ds] = identifier[h5_file] . identifier[create_carray] ( identifier[configuration_group] , identifier[name] = identifier[pixel_reg] [ literal[string] ], identifier[atom] = identifier[atom] , identifier[shape] = identifier[data] . identifier[shape] , identifier[title] = identifier[pixel_reg] [ literal[string] ]) identifier[ds] [:]= identifier[data] keyword[if] identifier[isinstance] ( identifier[configuration_file] , identifier[tb] . identifier[file] . identifier[File] ): identifier[h5_file] = identifier[configuration_file] identifier[save_conf] () keyword[else] : keyword[with] identifier[tb] . identifier[open_file] ( identifier[configuration_file] , identifier[mode] = literal[string] , identifier[title] = literal[string] ) keyword[as] identifier[h5_file] : identifier[save_conf] ()
def save_configuration_to_hdf5(register, configuration_file, name=''): """Saving configuration to HDF5 file from register object Parameters ---------- register : pybar.fei4.register object configuration_file : string, file Filename of the HDF5 configuration file or file object. name : string Additional identifier (subgroup). Useful when storing more than one configuration inside a HDF5 file. """ def save_conf(): logging.info('Saving configuration: %s' % h5_file.filename) register.configuration_file = h5_file.filename try: configuration_group = h5_file.create_group(h5_file.root, 'configuration') # depends on [control=['try'], data=[]] except tb.NodeError: configuration_group = h5_file.root.configuration # depends on [control=['except'], data=[]] if name: try: configuration_group = h5_file.create_group(configuration_group, name) # depends on [control=['try'], data=[]] except tb.NodeError: configuration_group = h5_file.root.configuration.name # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # calibration_parameters try: h5_file.remove_node(configuration_group, name='calibration_parameters') # depends on [control=['try'], data=[]] except tb.NodeError: pass # depends on [control=['except'], data=[]] calibration_data_table = h5_file.create_table(configuration_group, name='calibration_parameters', description=NameValue, title='calibration_parameters') calibration_data_row = calibration_data_table.row for (key, value) in register.calibration_parameters.iteritems(): calibration_data_row['name'] = key calibration_data_row['value'] = str(value) calibration_data_row.append() # depends on [control=['for'], data=[]] calibration_data_table.flush() # miscellaneous try: h5_file.remove_node(configuration_group, name='miscellaneous') # depends on [control=['try'], data=[]] except tb.NodeError: pass # depends on [control=['except'], data=[]] miscellaneous_data_table = h5_file.create_table(configuration_group, name='miscellaneous', 
description=NameValue, title='miscellaneous') miscellaneous_data_row = miscellaneous_data_table.row miscellaneous_data_row['name'] = 'Flavor' miscellaneous_data_row['value'] = register.flavor miscellaneous_data_row.append() miscellaneous_data_row['name'] = 'Chip_ID' miscellaneous_data_row['value'] = register.chip_id miscellaneous_data_row.append() for (key, value) in register.miscellaneous.iteritems(): miscellaneous_data_row['name'] = key miscellaneous_data_row['value'] = value miscellaneous_data_row.append() # depends on [control=['for'], data=[]] miscellaneous_data_table.flush() # global try: h5_file.remove_node(configuration_group, name='global_register') # depends on [control=['try'], data=[]] except tb.NodeError: pass # depends on [control=['except'], data=[]] global_data_table = h5_file.create_table(configuration_group, name='global_register', description=NameValue, title='global_register') global_data_table_row = global_data_table.row global_regs = register.get_global_register_objects(readonly=False) for global_reg in sorted(global_regs, key=itemgetter('name')): global_data_table_row['name'] = global_reg['name'] global_data_table_row['value'] = global_reg['value'] # TODO: some function that converts to bin, hex global_data_table_row.append() # depends on [control=['for'], data=['global_reg']] global_data_table.flush() # pixel for pixel_reg in register.pixel_registers.itervalues(): try: h5_file.remove_node(configuration_group, name=pixel_reg['name']) # depends on [control=['try'], data=[]] except tb.NodeError: pass # depends on [control=['except'], data=[]] data = pixel_reg['value'].T atom = tb.Atom.from_dtype(data.dtype) ds = h5_file.create_carray(configuration_group, name=pixel_reg['name'], atom=atom, shape=data.shape, title=pixel_reg['name']) ds[:] = data # depends on [control=['for'], data=['pixel_reg']] if isinstance(configuration_file, tb.file.File): h5_file = configuration_file save_conf() # depends on [control=['if'], data=[]] else: with 
tb.open_file(configuration_file, mode='a', title='') as h5_file: save_conf() # depends on [control=['with'], data=[]]
def clear_lock(self, remote=None, lock_type='update'): ''' Clear update.lk for all remotes ''' cleared = [] errors = [] for repo in self.remotes: if remote: # Specific remote URL/pattern was passed, ensure that the URL # matches or else skip this one try: if not fnmatch.fnmatch(repo.url, remote): continue except TypeError: # remote was non-string, try again if not fnmatch.fnmatch(repo.url, six.text_type(remote)): continue success, failed = repo.clear_lock(lock_type=lock_type) cleared.extend(success) errors.extend(failed) return cleared, errors
def function[clear_lock, parameter[self, remote, lock_type]]: constant[ Clear update.lk for all remotes ] variable[cleared] assign[=] list[[]] variable[errors] assign[=] list[[]] for taget[name[repo]] in starred[name[self].remotes] begin[:] if name[remote] begin[:] <ast.Try object at 0x7da1b2044e50> <ast.Tuple object at 0x7da1b20b9a20> assign[=] call[name[repo].clear_lock, parameter[]] call[name[cleared].extend, parameter[name[success]]] call[name[errors].extend, parameter[name[failed]]] return[tuple[[<ast.Name object at 0x7da1b20b8e20>, <ast.Name object at 0x7da1b20bb010>]]]
keyword[def] identifier[clear_lock] ( identifier[self] , identifier[remote] = keyword[None] , identifier[lock_type] = literal[string] ): literal[string] identifier[cleared] =[] identifier[errors] =[] keyword[for] identifier[repo] keyword[in] identifier[self] . identifier[remotes] : keyword[if] identifier[remote] : keyword[try] : keyword[if] keyword[not] identifier[fnmatch] . identifier[fnmatch] ( identifier[repo] . identifier[url] , identifier[remote] ): keyword[continue] keyword[except] identifier[TypeError] : keyword[if] keyword[not] identifier[fnmatch] . identifier[fnmatch] ( identifier[repo] . identifier[url] , identifier[six] . identifier[text_type] ( identifier[remote] )): keyword[continue] identifier[success] , identifier[failed] = identifier[repo] . identifier[clear_lock] ( identifier[lock_type] = identifier[lock_type] ) identifier[cleared] . identifier[extend] ( identifier[success] ) identifier[errors] . identifier[extend] ( identifier[failed] ) keyword[return] identifier[cleared] , identifier[errors]
def clear_lock(self, remote=None, lock_type='update'): """ Clear update.lk for all remotes """ cleared = [] errors = [] for repo in self.remotes: if remote: # Specific remote URL/pattern was passed, ensure that the URL # matches or else skip this one try: if not fnmatch.fnmatch(repo.url, remote): continue # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except TypeError: # remote was non-string, try again if not fnmatch.fnmatch(repo.url, six.text_type(remote)): continue # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] (success, failed) = repo.clear_lock(lock_type=lock_type) cleared.extend(success) errors.extend(failed) # depends on [control=['for'], data=['repo']] return (cleared, errors)
def write_tree_newick(self, filename, hide_rooted_prefix=False): '''Write this ``Tree`` to a Newick file Args: ``filename`` (``str``): Path to desired output file (plain-text or gzipped) ''' if not isinstance(filename, str): raise TypeError("filename must be a str") treestr = self.newick() if hide_rooted_prefix: if treestr.startswith('[&R]'): treestr = treestr[4:].strip() else: warn("Specified hide_rooted_prefix, but tree was not rooted") if filename.lower().endswith('.gz'): # gzipped file f = gopen(expanduser(filename),'wb',9); f.write(treestr.encode()); f.close() else: # plain-text file f = open(expanduser(filename),'w'); f.write(treestr); f.close()
def function[write_tree_newick, parameter[self, filename, hide_rooted_prefix]]: constant[Write this ``Tree`` to a Newick file Args: ``filename`` (``str``): Path to desired output file (plain-text or gzipped) ] if <ast.UnaryOp object at 0x7da1b0ed2470> begin[:] <ast.Raise object at 0x7da1b0ed2f80> variable[treestr] assign[=] call[name[self].newick, parameter[]] if name[hide_rooted_prefix] begin[:] if call[name[treestr].startswith, parameter[constant[[&R]]]] begin[:] variable[treestr] assign[=] call[call[name[treestr]][<ast.Slice object at 0x7da1b0ba70d0>].strip, parameter[]] if call[call[name[filename].lower, parameter[]].endswith, parameter[constant[.gz]]] begin[:] variable[f] assign[=] call[name[gopen], parameter[call[name[expanduser], parameter[name[filename]]], constant[wb], constant[9]]] call[name[f].write, parameter[call[name[treestr].encode, parameter[]]]] call[name[f].close, parameter[]]
keyword[def] identifier[write_tree_newick] ( identifier[self] , identifier[filename] , identifier[hide_rooted_prefix] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[filename] , identifier[str] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[treestr] = identifier[self] . identifier[newick] () keyword[if] identifier[hide_rooted_prefix] : keyword[if] identifier[treestr] . identifier[startswith] ( literal[string] ): identifier[treestr] = identifier[treestr] [ literal[int] :]. identifier[strip] () keyword[else] : identifier[warn] ( literal[string] ) keyword[if] identifier[filename] . identifier[lower] (). identifier[endswith] ( literal[string] ): identifier[f] = identifier[gopen] ( identifier[expanduser] ( identifier[filename] ), literal[string] , literal[int] ); identifier[f] . identifier[write] ( identifier[treestr] . identifier[encode] ()); identifier[f] . identifier[close] () keyword[else] : identifier[f] = identifier[open] ( identifier[expanduser] ( identifier[filename] ), literal[string] ); identifier[f] . identifier[write] ( identifier[treestr] ); identifier[f] . identifier[close] ()
def write_tree_newick(self, filename, hide_rooted_prefix=False): """Write this ``Tree`` to a Newick file Args: ``filename`` (``str``): Path to desired output file (plain-text or gzipped) """ if not isinstance(filename, str): raise TypeError('filename must be a str') # depends on [control=['if'], data=[]] treestr = self.newick() if hide_rooted_prefix: if treestr.startswith('[&R]'): treestr = treestr[4:].strip() # depends on [control=['if'], data=[]] else: warn('Specified hide_rooted_prefix, but tree was not rooted') # depends on [control=['if'], data=[]] if filename.lower().endswith('.gz'): # gzipped file f = gopen(expanduser(filename), 'wb', 9) f.write(treestr.encode()) f.close() # depends on [control=['if'], data=[]] else: # plain-text file f = open(expanduser(filename), 'w') f.write(treestr) f.close()
def create(request): """Create a new poll""" errors = [] success = False listOfResponses = ['', '', ''] # 3 Blank lines by default title = '' description = '' id = '' if request.method == 'POST': # User saved the form # Retrieve parameters title = request.form.get('title') description = request.form.get('description') listOfResponses = [] for rep in request.form.getlist('rep[]'): if rep != '': listOfResponses.append(rep) # Test if everything is ok if title == "": errors.append("Please set a title !") if len(listOfResponses) == 0: errors.append("Please set at least one response !") # Can we save the new question ? if len(errors) == 0: # Yes. Let save data curDB.execute("INSERT INTO Poll (title, description) VALUES (?, ?)", (title, description)) # The id of the poll id = curDB.lastrowid # Insert responses for rep in listOfResponses: curDB.execute("INSERT INTO Response (pollId, title) VALUES (?, ?)", (id, rep)) coxDB.commit() success = True # Minimum of 3 lines of questions while len(listOfResponses) < 3: listOfResponses.append('') return {'errors': errors, 'success': success, 'listOfResponses': listOfResponses, 'title': title, 'description': description, 'id': id}
def function[create, parameter[request]]: constant[Create a new poll] variable[errors] assign[=] list[[]] variable[success] assign[=] constant[False] variable[listOfResponses] assign[=] list[[<ast.Constant object at 0x7da1b0b83c40>, <ast.Constant object at 0x7da1b0b82140>, <ast.Constant object at 0x7da1b0b83520>]] variable[title] assign[=] constant[] variable[description] assign[=] constant[] variable[id] assign[=] constant[] if compare[name[request].method equal[==] constant[POST]] begin[:] variable[title] assign[=] call[name[request].form.get, parameter[constant[title]]] variable[description] assign[=] call[name[request].form.get, parameter[constant[description]]] variable[listOfResponses] assign[=] list[[]] for taget[name[rep]] in starred[call[name[request].form.getlist, parameter[constant[rep[]]]]] begin[:] if compare[name[rep] not_equal[!=] constant[]] begin[:] call[name[listOfResponses].append, parameter[name[rep]]] if compare[name[title] equal[==] constant[]] begin[:] call[name[errors].append, parameter[constant[Please set a title !]]] if compare[call[name[len], parameter[name[listOfResponses]]] equal[==] constant[0]] begin[:] call[name[errors].append, parameter[constant[Please set at least one response !]]] if compare[call[name[len], parameter[name[errors]]] equal[==] constant[0]] begin[:] call[name[curDB].execute, parameter[constant[INSERT INTO Poll (title, description) VALUES (?, ?)], tuple[[<ast.Name object at 0x7da1b0b70970>, <ast.Name object at 0x7da1b0b718d0>]]]] variable[id] assign[=] name[curDB].lastrowid for taget[name[rep]] in starred[name[listOfResponses]] begin[:] call[name[curDB].execute, parameter[constant[INSERT INTO Response (pollId, title) VALUES (?, ?)], tuple[[<ast.Name object at 0x7da1b0b70bb0>, <ast.Name object at 0x7da1b0b73700>]]]] call[name[coxDB].commit, parameter[]] variable[success] assign[=] constant[True] while compare[call[name[len], parameter[name[listOfResponses]]] less[<] constant[3]] begin[:] 
call[name[listOfResponses].append, parameter[constant[]]] return[dictionary[[<ast.Constant object at 0x7da1b0b71f90>, <ast.Constant object at 0x7da1b0b72800>, <ast.Constant object at 0x7da1b0b72c20>, <ast.Constant object at 0x7da1b0b71390>, <ast.Constant object at 0x7da1b0b72170>, <ast.Constant object at 0x7da1b0b72710>], [<ast.Name object at 0x7da1b0b70280>, <ast.Name object at 0x7da1b0b73550>, <ast.Name object at 0x7da1b0b71810>, <ast.Name object at 0x7da1b0b70850>, <ast.Name object at 0x7da1b0b70a90>, <ast.Name object at 0x7da1b0b716c0>]]]
keyword[def] identifier[create] ( identifier[request] ): literal[string] identifier[errors] =[] identifier[success] = keyword[False] identifier[listOfResponses] =[ literal[string] , literal[string] , literal[string] ] identifier[title] = literal[string] identifier[description] = literal[string] identifier[id] = literal[string] keyword[if] identifier[request] . identifier[method] == literal[string] : identifier[title] = identifier[request] . identifier[form] . identifier[get] ( literal[string] ) identifier[description] = identifier[request] . identifier[form] . identifier[get] ( literal[string] ) identifier[listOfResponses] =[] keyword[for] identifier[rep] keyword[in] identifier[request] . identifier[form] . identifier[getlist] ( literal[string] ): keyword[if] identifier[rep] != literal[string] : identifier[listOfResponses] . identifier[append] ( identifier[rep] ) keyword[if] identifier[title] == literal[string] : identifier[errors] . identifier[append] ( literal[string] ) keyword[if] identifier[len] ( identifier[listOfResponses] )== literal[int] : identifier[errors] . identifier[append] ( literal[string] ) keyword[if] identifier[len] ( identifier[errors] )== literal[int] : identifier[curDB] . identifier[execute] ( literal[string] ,( identifier[title] , identifier[description] )) identifier[id] = identifier[curDB] . identifier[lastrowid] keyword[for] identifier[rep] keyword[in] identifier[listOfResponses] : identifier[curDB] . identifier[execute] ( literal[string] ,( identifier[id] , identifier[rep] )) identifier[coxDB] . identifier[commit] () identifier[success] = keyword[True] keyword[while] identifier[len] ( identifier[listOfResponses] )< literal[int] : identifier[listOfResponses] . 
identifier[append] ( literal[string] ) keyword[return] { literal[string] : identifier[errors] , literal[string] : identifier[success] , literal[string] : identifier[listOfResponses] , literal[string] : identifier[title] , literal[string] : identifier[description] , literal[string] : identifier[id] }
def create(request): """Create a new poll""" errors = [] success = False listOfResponses = ['', '', ''] # 3 Blank lines by default title = '' description = '' id = '' if request.method == 'POST': # User saved the form # Retrieve parameters title = request.form.get('title') description = request.form.get('description') listOfResponses = [] for rep in request.form.getlist('rep[]'): if rep != '': listOfResponses.append(rep) # depends on [control=['if'], data=['rep']] # depends on [control=['for'], data=['rep']] # Test if everything is ok if title == '': errors.append('Please set a title !') # depends on [control=['if'], data=[]] if len(listOfResponses) == 0: errors.append('Please set at least one response !') # depends on [control=['if'], data=[]] # Can we save the new question ? if len(errors) == 0: # Yes. Let save data curDB.execute('INSERT INTO Poll (title, description) VALUES (?, ?)', (title, description)) # The id of the poll id = curDB.lastrowid # Insert responses for rep in listOfResponses: curDB.execute('INSERT INTO Response (pollId, title) VALUES (?, ?)', (id, rep)) # depends on [control=['for'], data=['rep']] coxDB.commit() success = True # depends on [control=['if'], data=[]] # Minimum of 3 lines of questions while len(listOfResponses) < 3: listOfResponses.append('') # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] return {'errors': errors, 'success': success, 'listOfResponses': listOfResponses, 'title': title, 'description': description, 'id': id}
def afterExecSheet(self, sheet, escaped, err): 'Records currentActiveRow' if not self.currentActiveRow: # nothing to record return if err: self.currentActiveRow[-1] += ' [%s]' % err if isLoggableSheet(sheet): # don't record jumps to cmdlog or other internal sheets # remove user-aborted commands and simple movements if not escaped and isLoggableCommand(self.currentActiveRow.keystrokes, self.currentActiveRow.longname): self.addRow(self.currentActiveRow) if options.cmdlog_histfile: if not getattr(vd(), 'sessionlog', None): vd().sessionlog = loadInternalSheet(CommandLog, Path(date().strftime(options.cmdlog_histfile))) append_tsv_row(vd().sessionlog, self.currentActiveRow) self.currentActiveRow = None
def function[afterExecSheet, parameter[self, sheet, escaped, err]]: constant[Records currentActiveRow] if <ast.UnaryOp object at 0x7da1b26addb0> begin[:] return[None] if name[err] begin[:] <ast.AugAssign object at 0x7da1b26aeef0> if call[name[isLoggableSheet], parameter[name[sheet]]] begin[:] if <ast.BoolOp object at 0x7da1b26af5e0> begin[:] call[name[self].addRow, parameter[name[self].currentActiveRow]] if name[options].cmdlog_histfile begin[:] if <ast.UnaryOp object at 0x7da1b26ace20> begin[:] call[name[vd], parameter[]].sessionlog assign[=] call[name[loadInternalSheet], parameter[name[CommandLog], call[name[Path], parameter[call[call[name[date], parameter[]].strftime, parameter[name[options].cmdlog_histfile]]]]]] call[name[append_tsv_row], parameter[call[name[vd], parameter[]].sessionlog, name[self].currentActiveRow]] name[self].currentActiveRow assign[=] constant[None]
keyword[def] identifier[afterExecSheet] ( identifier[self] , identifier[sheet] , identifier[escaped] , identifier[err] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[currentActiveRow] : keyword[return] keyword[if] identifier[err] : identifier[self] . identifier[currentActiveRow] [- literal[int] ]+= literal[string] % identifier[err] keyword[if] identifier[isLoggableSheet] ( identifier[sheet] ): keyword[if] keyword[not] identifier[escaped] keyword[and] identifier[isLoggableCommand] ( identifier[self] . identifier[currentActiveRow] . identifier[keystrokes] , identifier[self] . identifier[currentActiveRow] . identifier[longname] ): identifier[self] . identifier[addRow] ( identifier[self] . identifier[currentActiveRow] ) keyword[if] identifier[options] . identifier[cmdlog_histfile] : keyword[if] keyword[not] identifier[getattr] ( identifier[vd] (), literal[string] , keyword[None] ): identifier[vd] (). identifier[sessionlog] = identifier[loadInternalSheet] ( identifier[CommandLog] , identifier[Path] ( identifier[date] (). identifier[strftime] ( identifier[options] . identifier[cmdlog_histfile] ))) identifier[append_tsv_row] ( identifier[vd] (). identifier[sessionlog] , identifier[self] . identifier[currentActiveRow] ) identifier[self] . identifier[currentActiveRow] = keyword[None]
def afterExecSheet(self, sheet, escaped, err): """Records currentActiveRow""" if not self.currentActiveRow: # nothing to record return # depends on [control=['if'], data=[]] if err: self.currentActiveRow[-1] += ' [%s]' % err # depends on [control=['if'], data=[]] if isLoggableSheet(sheet): # don't record jumps to cmdlog or other internal sheets # remove user-aborted commands and simple movements if not escaped and isLoggableCommand(self.currentActiveRow.keystrokes, self.currentActiveRow.longname): self.addRow(self.currentActiveRow) if options.cmdlog_histfile: if not getattr(vd(), 'sessionlog', None): vd().sessionlog = loadInternalSheet(CommandLog, Path(date().strftime(options.cmdlog_histfile))) # depends on [control=['if'], data=[]] append_tsv_row(vd().sessionlog, self.currentActiveRow) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self.currentActiveRow = None
def dump(self): """Serialize the state of this InMemoryStorageEngine to a dict. Returns: dict: The serialized data. """ return { u'storage_data': [x.asdict() for x in self.storage_data], u'streaming_data': [x.asdict() for x in self.streaming_data] }
def function[dump, parameter[self]]: constant[Serialize the state of this InMemoryStorageEngine to a dict. Returns: dict: The serialized data. ] return[dictionary[[<ast.Constant object at 0x7da20c992800>, <ast.Constant object at 0x7da20c9909a0>], [<ast.ListComp object at 0x7da20c991570>, <ast.ListComp object at 0x7da20e957400>]]]
keyword[def] identifier[dump] ( identifier[self] ): literal[string] keyword[return] { literal[string] :[ identifier[x] . identifier[asdict] () keyword[for] identifier[x] keyword[in] identifier[self] . identifier[storage_data] ], literal[string] :[ identifier[x] . identifier[asdict] () keyword[for] identifier[x] keyword[in] identifier[self] . identifier[streaming_data] ] }
def dump(self): """Serialize the state of this InMemoryStorageEngine to a dict. Returns: dict: The serialized data. """ return {u'storage_data': [x.asdict() for x in self.storage_data], u'streaming_data': [x.asdict() for x in self.streaming_data]}
def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ if self.parenthesis: return '{}({})'.format(' ' * indent, pretty_str(self.value)) return pretty_str(self.value, indent=indent)
def function[pretty_str, parameter[self, indent]]: constant[Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. ] if name[self].parenthesis begin[:] return[call[constant[{}({})].format, parameter[binary_operation[constant[ ] * name[indent]], call[name[pretty_str], parameter[name[self].value]]]]] return[call[name[pretty_str], parameter[name[self].value]]]
keyword[def] identifier[pretty_str] ( identifier[self] , identifier[indent] = literal[int] ): literal[string] keyword[if] identifier[self] . identifier[parenthesis] : keyword[return] literal[string] . identifier[format] ( literal[string] * identifier[indent] , identifier[pretty_str] ( identifier[self] . identifier[value] )) keyword[return] identifier[pretty_str] ( identifier[self] . identifier[value] , identifier[indent] = identifier[indent] )
def pretty_str(self, indent=0): """Return a human-readable string representation of this object. Kwargs: indent (int): The amount of spaces to use as indentation. """ if self.parenthesis: return '{}({})'.format(' ' * indent, pretty_str(self.value)) # depends on [control=['if'], data=[]] return pretty_str(self.value, indent=indent)
def _validate_cmds(self): """ 确保 cmd 没有重复 :return: """ cmd_list = list(self.rule_map.keys()) for bp in self.blueprints: cmd_list.extend(bp.rule_map.keys()) duplicate_cmds = (Counter(cmd_list) - Counter(set(cmd_list))).keys() assert not duplicate_cmds, 'duplicate cmds: %s' % duplicate_cmds
def function[_validate_cmds, parameter[self]]: constant[ 确保 cmd 没有重复 :return: ] variable[cmd_list] assign[=] call[name[list], parameter[call[name[self].rule_map.keys, parameter[]]]] for taget[name[bp]] in starred[name[self].blueprints] begin[:] call[name[cmd_list].extend, parameter[call[name[bp].rule_map.keys, parameter[]]]] variable[duplicate_cmds] assign[=] call[binary_operation[call[name[Counter], parameter[name[cmd_list]]] - call[name[Counter], parameter[call[name[set], parameter[name[cmd_list]]]]]].keys, parameter[]] assert[<ast.UnaryOp object at 0x7da1b0aed330>]
keyword[def] identifier[_validate_cmds] ( identifier[self] ): literal[string] identifier[cmd_list] = identifier[list] ( identifier[self] . identifier[rule_map] . identifier[keys] ()) keyword[for] identifier[bp] keyword[in] identifier[self] . identifier[blueprints] : identifier[cmd_list] . identifier[extend] ( identifier[bp] . identifier[rule_map] . identifier[keys] ()) identifier[duplicate_cmds] =( identifier[Counter] ( identifier[cmd_list] )- identifier[Counter] ( identifier[set] ( identifier[cmd_list] ))). identifier[keys] () keyword[assert] keyword[not] identifier[duplicate_cmds] , literal[string] % identifier[duplicate_cmds]
def _validate_cmds(self): """ 确保 cmd 没有重复 :return: """ cmd_list = list(self.rule_map.keys()) for bp in self.blueprints: cmd_list.extend(bp.rule_map.keys()) # depends on [control=['for'], data=['bp']] duplicate_cmds = (Counter(cmd_list) - Counter(set(cmd_list))).keys() assert not duplicate_cmds, 'duplicate cmds: %s' % duplicate_cmds
def usetz_now(): """Determine current time depending on USE_TZ setting. Affects Django 1.4 and above only. if `USE_TZ = True`, then returns current time according to timezone, else returns current UTC time. """ USE_TZ = getattr(settings, 'USE_TZ', False) if USE_TZ and DJANGO_VERSION >= '1.4': return now() else: return datetime.utcnow()
def function[usetz_now, parameter[]]: constant[Determine current time depending on USE_TZ setting. Affects Django 1.4 and above only. if `USE_TZ = True`, then returns current time according to timezone, else returns current UTC time. ] variable[USE_TZ] assign[=] call[name[getattr], parameter[name[settings], constant[USE_TZ], constant[False]]] if <ast.BoolOp object at 0x7da1b13cfa00> begin[:] return[call[name[now], parameter[]]]
keyword[def] identifier[usetz_now] (): literal[string] identifier[USE_TZ] = identifier[getattr] ( identifier[settings] , literal[string] , keyword[False] ) keyword[if] identifier[USE_TZ] keyword[and] identifier[DJANGO_VERSION] >= literal[string] : keyword[return] identifier[now] () keyword[else] : keyword[return] identifier[datetime] . identifier[utcnow] ()
def usetz_now(): """Determine current time depending on USE_TZ setting. Affects Django 1.4 and above only. if `USE_TZ = True`, then returns current time according to timezone, else returns current UTC time. """ USE_TZ = getattr(settings, 'USE_TZ', False) if USE_TZ and DJANGO_VERSION >= '1.4': return now() # depends on [control=['if'], data=[]] else: return datetime.utcnow()
def playURI(self, uri): """Play a Spotify uri, for example spotify:track:5Yn8WCB4Dqm8snemB5Mu4K :param uri: Playlist, Artist, Album, or Song Uri """ url: str = get_url("/remote/play.json") params = { "oauth": self._oauth_token, "csrf": self._csrf_token, "uri": uri, "context": uri, } r = self._request(url=url, params=params) return r.json()
def function[playURI, parameter[self, uri]]: constant[Play a Spotify uri, for example spotify:track:5Yn8WCB4Dqm8snemB5Mu4K :param uri: Playlist, Artist, Album, or Song Uri ] <ast.AnnAssign object at 0x7da1b25240a0> variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b25269e0>, <ast.Constant object at 0x7da1b2526cb0>, <ast.Constant object at 0x7da1b2526c20>, <ast.Constant object at 0x7da1b2526b90>], [<ast.Attribute object at 0x7da1b2526950>, <ast.Attribute object at 0x7da1b2526aa0>, <ast.Name object at 0x7da1b2526980>, <ast.Name object at 0x7da1b25279a0>]] variable[r] assign[=] call[name[self]._request, parameter[]] return[call[name[r].json, parameter[]]]
keyword[def] identifier[playURI] ( identifier[self] , identifier[uri] ): literal[string] identifier[url] : identifier[str] = identifier[get_url] ( literal[string] ) identifier[params] ={ literal[string] : identifier[self] . identifier[_oauth_token] , literal[string] : identifier[self] . identifier[_csrf_token] , literal[string] : identifier[uri] , literal[string] : identifier[uri] , } identifier[r] = identifier[self] . identifier[_request] ( identifier[url] = identifier[url] , identifier[params] = identifier[params] ) keyword[return] identifier[r] . identifier[json] ()
def playURI(self, uri): """Play a Spotify uri, for example spotify:track:5Yn8WCB4Dqm8snemB5Mu4K :param uri: Playlist, Artist, Album, or Song Uri """ url: str = get_url('/remote/play.json') params = {'oauth': self._oauth_token, 'csrf': self._csrf_token, 'uri': uri, 'context': uri} r = self._request(url=url, params=params) return r.json()
def _validate(self, writing=False): """Verify that the box obeys the specifications.""" if ((len(self.bits_per_component) != len(self.signed)) or (len(self.signed) != self.palette.shape[1])): msg = ("The length of the 'bits_per_component' and the 'signed' " "members must equal the number of columns of the palette.") self._dispatch_validation_error(msg, writing=writing) bps = self.bits_per_component if writing and not all(b == bps[0] for b in bps): # We don't support writing palettes with bit depths that are # different. msg = "Writing palettes with varying bit depths is not supported." self._dispatch_validation_error(msg, writing=writing)
def function[_validate, parameter[self, writing]]: constant[Verify that the box obeys the specifications.] if <ast.BoolOp object at 0x7da18bccb5b0> begin[:] variable[msg] assign[=] constant[The length of the 'bits_per_component' and the 'signed' members must equal the number of columns of the palette.] call[name[self]._dispatch_validation_error, parameter[name[msg]]] variable[bps] assign[=] name[self].bits_per_component if <ast.BoolOp object at 0x7da204623400> begin[:] variable[msg] assign[=] constant[Writing palettes with varying bit depths is not supported.] call[name[self]._dispatch_validation_error, parameter[name[msg]]]
keyword[def] identifier[_validate] ( identifier[self] , identifier[writing] = keyword[False] ): literal[string] keyword[if] (( identifier[len] ( identifier[self] . identifier[bits_per_component] )!= identifier[len] ( identifier[self] . identifier[signed] )) keyword[or] ( identifier[len] ( identifier[self] . identifier[signed] )!= identifier[self] . identifier[palette] . identifier[shape] [ literal[int] ])): identifier[msg] =( literal[string] literal[string] ) identifier[self] . identifier[_dispatch_validation_error] ( identifier[msg] , identifier[writing] = identifier[writing] ) identifier[bps] = identifier[self] . identifier[bits_per_component] keyword[if] identifier[writing] keyword[and] keyword[not] identifier[all] ( identifier[b] == identifier[bps] [ literal[int] ] keyword[for] identifier[b] keyword[in] identifier[bps] ): identifier[msg] = literal[string] identifier[self] . identifier[_dispatch_validation_error] ( identifier[msg] , identifier[writing] = identifier[writing] )
def _validate(self, writing=False): """Verify that the box obeys the specifications.""" if len(self.bits_per_component) != len(self.signed) or len(self.signed) != self.palette.shape[1]: msg = "The length of the 'bits_per_component' and the 'signed' members must equal the number of columns of the palette." self._dispatch_validation_error(msg, writing=writing) # depends on [control=['if'], data=[]] bps = self.bits_per_component if writing and (not all((b == bps[0] for b in bps))): # We don't support writing palettes with bit depths that are # different. msg = 'Writing palettes with varying bit depths is not supported.' self._dispatch_validation_error(msg, writing=writing) # depends on [control=['if'], data=[]]
def public_notes_500(self, key, value): """Populate the ``public_notes`` key.""" return [ { 'source': value.get('9'), 'value': public_note, } for public_note in force_list(value.get('a')) ]
def function[public_notes_500, parameter[self, key, value]]: constant[Populate the ``public_notes`` key.] return[<ast.ListComp object at 0x7da18f8136a0>]
keyword[def] identifier[public_notes_500] ( identifier[self] , identifier[key] , identifier[value] ): literal[string] keyword[return] [ { literal[string] : identifier[value] . identifier[get] ( literal[string] ), literal[string] : identifier[public_note] , } keyword[for] identifier[public_note] keyword[in] identifier[force_list] ( identifier[value] . identifier[get] ( literal[string] )) ]
def public_notes_500(self, key, value): """Populate the ``public_notes`` key.""" return [{'source': value.get('9'), 'value': public_note} for public_note in force_list(value.get('a'))]
def _add_merged_attributes(node, all_recipes, all_roles): """Merges attributes from cookbooks, node and roles Chef Attribute precedence: http://docs.opscode.com/essentials_cookbook_attribute_files.html#attribute-precedence LittleChef implements, in precedence order: - Cookbook default - Environment default - Role default - Node normal - Role override - Environment override NOTE: In order for cookbook attributes to be read, they need to be correctly defined in its metadata.json """ # Get cookbooks from extended recipes attributes = {} for recipe in node['recipes']: # Find this recipe found = False for r in all_recipes: if recipe == r['name']: found = True for attr in r['attributes']: if r['attributes'][attr].get('type') == "hash": value = {} else: value = r['attributes'][attr].get('default') # Attribute dictionaries are defined as a single # compound key. Split and build proper dict build_dct(attributes, attr.split("/"), value) if not found: error = "Could not find recipe '{0}' while ".format(recipe) error += "building node data bag for '{0}'".format(node['name']) abort(error) # Get default role attributes for role in node['roles']: for r in all_roles: if role == r['name']: update_dct(attributes, r.get('default_attributes', {})) # Get default environment attributes environment = lib.get_environment(node['chef_environment']) update_dct(attributes, environment.get('default_attributes', {})) # Get normal node attributes non_attribute_fields = [ 'id', 'name', 'role', 'roles', 'recipes', 'run_list', 'ipaddress'] node_attributes = {} for key in node: if key in non_attribute_fields: continue node_attributes[key] = node[key] update_dct(attributes, node_attributes) # Get override role attributes for role in node['roles']: for r in all_roles: if role == r['name']: update_dct(attributes, r.get('override_attributes', {})) # Get override environment attributes update_dct(attributes, environment.get('override_attributes', {})) # Merge back to the original node object 
node.update(attributes)
def function[_add_merged_attributes, parameter[node, all_recipes, all_roles]]: constant[Merges attributes from cookbooks, node and roles Chef Attribute precedence: http://docs.opscode.com/essentials_cookbook_attribute_files.html#attribute-precedence LittleChef implements, in precedence order: - Cookbook default - Environment default - Role default - Node normal - Role override - Environment override NOTE: In order for cookbook attributes to be read, they need to be correctly defined in its metadata.json ] variable[attributes] assign[=] dictionary[[], []] for taget[name[recipe]] in starred[call[name[node]][constant[recipes]]] begin[:] variable[found] assign[=] constant[False] for taget[name[r]] in starred[name[all_recipes]] begin[:] if compare[name[recipe] equal[==] call[name[r]][constant[name]]] begin[:] variable[found] assign[=] constant[True] for taget[name[attr]] in starred[call[name[r]][constant[attributes]]] begin[:] if compare[call[call[call[name[r]][constant[attributes]]][name[attr]].get, parameter[constant[type]]] equal[==] constant[hash]] begin[:] variable[value] assign[=] dictionary[[], []] call[name[build_dct], parameter[name[attributes], call[name[attr].split, parameter[constant[/]]], name[value]]] if <ast.UnaryOp object at 0x7da20c6e5420> begin[:] variable[error] assign[=] call[constant[Could not find recipe '{0}' while ].format, parameter[name[recipe]]] <ast.AugAssign object at 0x7da20c9907f0> call[name[abort], parameter[name[error]]] for taget[name[role]] in starred[call[name[node]][constant[roles]]] begin[:] for taget[name[r]] in starred[name[all_roles]] begin[:] if compare[name[role] equal[==] call[name[r]][constant[name]]] begin[:] call[name[update_dct], parameter[name[attributes], call[name[r].get, parameter[constant[default_attributes], dictionary[[], []]]]]] variable[environment] assign[=] call[name[lib].get_environment, parameter[call[name[node]][constant[chef_environment]]]] call[name[update_dct], parameter[name[attributes], 
call[name[environment].get, parameter[constant[default_attributes], dictionary[[], []]]]]] variable[non_attribute_fields] assign[=] list[[<ast.Constant object at 0x7da2054a6f50>, <ast.Constant object at 0x7da2054a6890>, <ast.Constant object at 0x7da2054a55a0>, <ast.Constant object at 0x7da2054a6ef0>, <ast.Constant object at 0x7da2054a6bf0>, <ast.Constant object at 0x7da2054a46a0>, <ast.Constant object at 0x7da2054a79a0>]] variable[node_attributes] assign[=] dictionary[[], []] for taget[name[key]] in starred[name[node]] begin[:] if compare[name[key] in name[non_attribute_fields]] begin[:] continue call[name[node_attributes]][name[key]] assign[=] call[name[node]][name[key]] call[name[update_dct], parameter[name[attributes], name[node_attributes]]] for taget[name[role]] in starred[call[name[node]][constant[roles]]] begin[:] for taget[name[r]] in starred[name[all_roles]] begin[:] if compare[name[role] equal[==] call[name[r]][constant[name]]] begin[:] call[name[update_dct], parameter[name[attributes], call[name[r].get, parameter[constant[override_attributes], dictionary[[], []]]]]] call[name[update_dct], parameter[name[attributes], call[name[environment].get, parameter[constant[override_attributes], dictionary[[], []]]]]] call[name[node].update, parameter[name[attributes]]]
keyword[def] identifier[_add_merged_attributes] ( identifier[node] , identifier[all_recipes] , identifier[all_roles] ): literal[string] identifier[attributes] ={} keyword[for] identifier[recipe] keyword[in] identifier[node] [ literal[string] ]: identifier[found] = keyword[False] keyword[for] identifier[r] keyword[in] identifier[all_recipes] : keyword[if] identifier[recipe] == identifier[r] [ literal[string] ]: identifier[found] = keyword[True] keyword[for] identifier[attr] keyword[in] identifier[r] [ literal[string] ]: keyword[if] identifier[r] [ literal[string] ][ identifier[attr] ]. identifier[get] ( literal[string] )== literal[string] : identifier[value] ={} keyword[else] : identifier[value] = identifier[r] [ literal[string] ][ identifier[attr] ]. identifier[get] ( literal[string] ) identifier[build_dct] ( identifier[attributes] , identifier[attr] . identifier[split] ( literal[string] ), identifier[value] ) keyword[if] keyword[not] identifier[found] : identifier[error] = literal[string] . identifier[format] ( identifier[recipe] ) identifier[error] += literal[string] . identifier[format] ( identifier[node] [ literal[string] ]) identifier[abort] ( identifier[error] ) keyword[for] identifier[role] keyword[in] identifier[node] [ literal[string] ]: keyword[for] identifier[r] keyword[in] identifier[all_roles] : keyword[if] identifier[role] == identifier[r] [ literal[string] ]: identifier[update_dct] ( identifier[attributes] , identifier[r] . identifier[get] ( literal[string] ,{})) identifier[environment] = identifier[lib] . identifier[get_environment] ( identifier[node] [ literal[string] ]) identifier[update_dct] ( identifier[attributes] , identifier[environment] . 
identifier[get] ( literal[string] ,{})) identifier[non_attribute_fields] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] identifier[node_attributes] ={} keyword[for] identifier[key] keyword[in] identifier[node] : keyword[if] identifier[key] keyword[in] identifier[non_attribute_fields] : keyword[continue] identifier[node_attributes] [ identifier[key] ]= identifier[node] [ identifier[key] ] identifier[update_dct] ( identifier[attributes] , identifier[node_attributes] ) keyword[for] identifier[role] keyword[in] identifier[node] [ literal[string] ]: keyword[for] identifier[r] keyword[in] identifier[all_roles] : keyword[if] identifier[role] == identifier[r] [ literal[string] ]: identifier[update_dct] ( identifier[attributes] , identifier[r] . identifier[get] ( literal[string] ,{})) identifier[update_dct] ( identifier[attributes] , identifier[environment] . identifier[get] ( literal[string] ,{})) identifier[node] . identifier[update] ( identifier[attributes] )
def _add_merged_attributes(node, all_recipes, all_roles): """Merges attributes from cookbooks, node and roles Chef Attribute precedence: http://docs.opscode.com/essentials_cookbook_attribute_files.html#attribute-precedence LittleChef implements, in precedence order: - Cookbook default - Environment default - Role default - Node normal - Role override - Environment override NOTE: In order for cookbook attributes to be read, they need to be correctly defined in its metadata.json """ # Get cookbooks from extended recipes attributes = {} for recipe in node['recipes']: # Find this recipe found = False for r in all_recipes: if recipe == r['name']: found = True for attr in r['attributes']: if r['attributes'][attr].get('type') == 'hash': value = {} # depends on [control=['if'], data=[]] else: value = r['attributes'][attr].get('default') # Attribute dictionaries are defined as a single # compound key. Split and build proper dict build_dct(attributes, attr.split('/'), value) # depends on [control=['for'], data=['attr']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']] if not found: error = "Could not find recipe '{0}' while ".format(recipe) error += "building node data bag for '{0}'".format(node['name']) abort(error) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['recipe']] # Get default role attributes for role in node['roles']: for r in all_roles: if role == r['name']: update_dct(attributes, r.get('default_attributes', {})) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']] # depends on [control=['for'], data=['role']] # Get default environment attributes environment = lib.get_environment(node['chef_environment']) update_dct(attributes, environment.get('default_attributes', {})) # Get normal node attributes non_attribute_fields = ['id', 'name', 'role', 'roles', 'recipes', 'run_list', 'ipaddress'] node_attributes = {} for key in node: if key in non_attribute_fields: continue # 
depends on [control=['if'], data=[]] node_attributes[key] = node[key] # depends on [control=['for'], data=['key']] update_dct(attributes, node_attributes) # Get override role attributes for role in node['roles']: for r in all_roles: if role == r['name']: update_dct(attributes, r.get('override_attributes', {})) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']] # depends on [control=['for'], data=['role']] # Get override environment attributes update_dct(attributes, environment.get('override_attributes', {})) # Merge back to the original node object node.update(attributes)
def get_account_by_b58_address(self, b58_address: str, password: str) -> Account: """ :param b58_address: a base58 encode address. :param password: a password which is used to decrypt the encrypted private key. :return: """ acct = self.get_account_data_by_b58_address(b58_address) n = self.wallet_in_mem.scrypt.n salt = base64.b64decode(acct.salt) private_key = Account.get_gcm_decoded_private_key(acct.key, password, b58_address, salt, n, self.scheme) return Account(private_key, self.scheme)
def function[get_account_by_b58_address, parameter[self, b58_address, password]]: constant[ :param b58_address: a base58 encode address. :param password: a password which is used to decrypt the encrypted private key. :return: ] variable[acct] assign[=] call[name[self].get_account_data_by_b58_address, parameter[name[b58_address]]] variable[n] assign[=] name[self].wallet_in_mem.scrypt.n variable[salt] assign[=] call[name[base64].b64decode, parameter[name[acct].salt]] variable[private_key] assign[=] call[name[Account].get_gcm_decoded_private_key, parameter[name[acct].key, name[password], name[b58_address], name[salt], name[n], name[self].scheme]] return[call[name[Account], parameter[name[private_key], name[self].scheme]]]
keyword[def] identifier[get_account_by_b58_address] ( identifier[self] , identifier[b58_address] : identifier[str] , identifier[password] : identifier[str] )-> identifier[Account] : literal[string] identifier[acct] = identifier[self] . identifier[get_account_data_by_b58_address] ( identifier[b58_address] ) identifier[n] = identifier[self] . identifier[wallet_in_mem] . identifier[scrypt] . identifier[n] identifier[salt] = identifier[base64] . identifier[b64decode] ( identifier[acct] . identifier[salt] ) identifier[private_key] = identifier[Account] . identifier[get_gcm_decoded_private_key] ( identifier[acct] . identifier[key] , identifier[password] , identifier[b58_address] , identifier[salt] , identifier[n] , identifier[self] . identifier[scheme] ) keyword[return] identifier[Account] ( identifier[private_key] , identifier[self] . identifier[scheme] )
def get_account_by_b58_address(self, b58_address: str, password: str) -> Account: """ :param b58_address: a base58 encode address. :param password: a password which is used to decrypt the encrypted private key. :return: """ acct = self.get_account_data_by_b58_address(b58_address) n = self.wallet_in_mem.scrypt.n salt = base64.b64decode(acct.salt) private_key = Account.get_gcm_decoded_private_key(acct.key, password, b58_address, salt, n, self.scheme) return Account(private_key, self.scheme)
def _GenerateZipInfo(self, arcname=None, compress_type=None, st=None): """Generate ZipInfo instance for the given name, compression and stat. Args: arcname: The name in the archive this should take. compress_type: Compression type (zipfile.ZIP_DEFLATED, or ZIP_STORED) st: An optional stat object to be used for setting headers. Returns: ZipInfo instance. Raises: ValueError: If arcname is not provided. """ # Fake stat response. if st is None: # TODO(user):pytype: stat_result typing is not correct. # pytype: disable=wrong-arg-count st = os.stat_result((0o100644, 0, 0, 0, 0, 0, 0, 0, 0, 0)) # pytype: enable=wrong-arg-count mtime = time.localtime(st.st_mtime or time.time()) date_time = mtime[0:6] # Create ZipInfo instance to store file information if arcname is None: raise ValueError("An arcname must be provided.") zinfo = zipfile.ZipInfo(arcname, date_time) zinfo.external_attr = (st[0] & 0xFFFF) << 16 # Unix attributes if compress_type is None: zinfo.compress_type = self._compression else: zinfo.compress_type = compress_type zinfo.file_size = 0 zinfo.compress_size = 0 zinfo.flag_bits = 0x08 # Setting data descriptor flag. zinfo.CRC = 0x08074b50 # Predefined CRC for archives using data # descriptors. # This fills an empty Info-ZIP Unix extra field. zinfo.extra = struct.pack( "<HHIIHH", 0x5855, 12, 0, # time of last access (UTC/GMT) 0, # time of last modification (UTC/GMT) 0, # user ID 0) # group ID return zinfo
def function[_GenerateZipInfo, parameter[self, arcname, compress_type, st]]: constant[Generate ZipInfo instance for the given name, compression and stat. Args: arcname: The name in the archive this should take. compress_type: Compression type (zipfile.ZIP_DEFLATED, or ZIP_STORED) st: An optional stat object to be used for setting headers. Returns: ZipInfo instance. Raises: ValueError: If arcname is not provided. ] if compare[name[st] is constant[None]] begin[:] variable[st] assign[=] call[name[os].stat_result, parameter[tuple[[<ast.Constant object at 0x7da1b1c0f190>, <ast.Constant object at 0x7da1b1c0ece0>, <ast.Constant object at 0x7da1b1c0eef0>, <ast.Constant object at 0x7da1b1c0f0a0>, <ast.Constant object at 0x7da1b1c0db10>, <ast.Constant object at 0x7da1b1c0cf10>, <ast.Constant object at 0x7da1b1c0cf70>, <ast.Constant object at 0x7da1b1c0ce20>, <ast.Constant object at 0x7da1b1c0da20>, <ast.Constant object at 0x7da1b1c0cd60>]]]] variable[mtime] assign[=] call[name[time].localtime, parameter[<ast.BoolOp object at 0x7da1b1c3efb0>]] variable[date_time] assign[=] call[name[mtime]][<ast.Slice object at 0x7da1b1c3edd0>] if compare[name[arcname] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1c3e230> variable[zinfo] assign[=] call[name[zipfile].ZipInfo, parameter[name[arcname], name[date_time]]] name[zinfo].external_attr assign[=] binary_operation[binary_operation[call[name[st]][constant[0]] <ast.BitAnd object at 0x7da2590d6b60> constant[65535]] <ast.LShift object at 0x7da2590d69e0> constant[16]] if compare[name[compress_type] is constant[None]] begin[:] name[zinfo].compress_type assign[=] name[self]._compression name[zinfo].file_size assign[=] constant[0] name[zinfo].compress_size assign[=] constant[0] name[zinfo].flag_bits assign[=] constant[8] name[zinfo].CRC assign[=] constant[134695760] name[zinfo].extra assign[=] call[name[struct].pack, parameter[constant[<HHIIHH], constant[22613], constant[12], constant[0], constant[0], constant[0], constant[0]]] 
return[name[zinfo]]
keyword[def] identifier[_GenerateZipInfo] ( identifier[self] , identifier[arcname] = keyword[None] , identifier[compress_type] = keyword[None] , identifier[st] = keyword[None] ): literal[string] keyword[if] identifier[st] keyword[is] keyword[None] : identifier[st] = identifier[os] . identifier[stat_result] (( literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] )) identifier[mtime] = identifier[time] . identifier[localtime] ( identifier[st] . identifier[st_mtime] keyword[or] identifier[time] . identifier[time] ()) identifier[date_time] = identifier[mtime] [ literal[int] : literal[int] ] keyword[if] identifier[arcname] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[zinfo] = identifier[zipfile] . identifier[ZipInfo] ( identifier[arcname] , identifier[date_time] ) identifier[zinfo] . identifier[external_attr] =( identifier[st] [ literal[int] ]& literal[int] )<< literal[int] keyword[if] identifier[compress_type] keyword[is] keyword[None] : identifier[zinfo] . identifier[compress_type] = identifier[self] . identifier[_compression] keyword[else] : identifier[zinfo] . identifier[compress_type] = identifier[compress_type] identifier[zinfo] . identifier[file_size] = literal[int] identifier[zinfo] . identifier[compress_size] = literal[int] identifier[zinfo] . identifier[flag_bits] = literal[int] identifier[zinfo] . identifier[CRC] = literal[int] identifier[zinfo] . identifier[extra] = identifier[struct] . identifier[pack] ( literal[string] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ) keyword[return] identifier[zinfo]
def _GenerateZipInfo(self, arcname=None, compress_type=None, st=None): """Generate ZipInfo instance for the given name, compression and stat. Args: arcname: The name in the archive this should take. compress_type: Compression type (zipfile.ZIP_DEFLATED, or ZIP_STORED) st: An optional stat object to be used for setting headers. Returns: ZipInfo instance. Raises: ValueError: If arcname is not provided. """ # Fake stat response. if st is None: # TODO(user):pytype: stat_result typing is not correct. # pytype: disable=wrong-arg-count st = os.stat_result((33188, 0, 0, 0, 0, 0, 0, 0, 0, 0)) # depends on [control=['if'], data=['st']] # pytype: enable=wrong-arg-count mtime = time.localtime(st.st_mtime or time.time()) date_time = mtime[0:6] # Create ZipInfo instance to store file information if arcname is None: raise ValueError('An arcname must be provided.') # depends on [control=['if'], data=[]] zinfo = zipfile.ZipInfo(arcname, date_time) zinfo.external_attr = (st[0] & 65535) << 16 # Unix attributes if compress_type is None: zinfo.compress_type = self._compression # depends on [control=['if'], data=[]] else: zinfo.compress_type = compress_type zinfo.file_size = 0 zinfo.compress_size = 0 zinfo.flag_bits = 8 # Setting data descriptor flag. zinfo.CRC = 134695760 # Predefined CRC for archives using data # descriptors. # This fills an empty Info-ZIP Unix extra field. # time of last access (UTC/GMT) # time of last modification (UTC/GMT) # user ID zinfo.extra = struct.pack('<HHIIHH', 22613, 12, 0, 0, 0, 0) # group ID return zinfo
def build_mine_matrix(self, w, h, minenum): """random fill cells with mines and increments nearest mines num in adiacent cells""" self.minecount = 0 matrix = [[Cell(30, 30, x, y, self) for x in range(w)] for y in range(h)] for i in range(0, minenum): x = random.randint(0, w - 1) y = random.randint(0, h - 1) if matrix[y][x].has_mine: continue self.minecount += 1 matrix[y][x].has_mine = True for coord in [[-1, -1], [-1, 0], [-1, 1], [0, -1], [0, 1], [1, -1], [1, 0], [1, 1]]: _x, _y = coord if not self.coord_in_map(x + _x, y + _y, w, h): continue matrix[y + _y][x + _x].add_nearest_mine() return matrix
def function[build_mine_matrix, parameter[self, w, h, minenum]]: constant[random fill cells with mines and increments nearest mines num in adiacent cells] name[self].minecount assign[=] constant[0] variable[matrix] assign[=] <ast.ListComp object at 0x7da2046215a0> for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[minenum]]]] begin[:] variable[x] assign[=] call[name[random].randint, parameter[constant[0], binary_operation[name[w] - constant[1]]]] variable[y] assign[=] call[name[random].randint, parameter[constant[0], binary_operation[name[h] - constant[1]]]] if call[call[name[matrix]][name[y]]][name[x]].has_mine begin[:] continue <ast.AugAssign object at 0x7da18dc98190> call[call[name[matrix]][name[y]]][name[x]].has_mine assign[=] constant[True] for taget[name[coord]] in starred[list[[<ast.List object at 0x7da18dc986a0>, <ast.List object at 0x7da18dc99ba0>, <ast.List object at 0x7da18dc9ac50>, <ast.List object at 0x7da18dc9b040>, <ast.List object at 0x7da18dc9b4c0>, <ast.List object at 0x7da18dc98460>, <ast.List object at 0x7da18dc9b490>, <ast.List object at 0x7da18dc9a980>]]] begin[:] <ast.Tuple object at 0x7da18dc99090> assign[=] name[coord] if <ast.UnaryOp object at 0x7da18dc99f60> begin[:] continue call[call[call[name[matrix]][binary_operation[name[y] + name[_y]]]][binary_operation[name[x] + name[_x]]].add_nearest_mine, parameter[]] return[name[matrix]]
keyword[def] identifier[build_mine_matrix] ( identifier[self] , identifier[w] , identifier[h] , identifier[minenum] ): literal[string] identifier[self] . identifier[minecount] = literal[int] identifier[matrix] =[[ identifier[Cell] ( literal[int] , literal[int] , identifier[x] , identifier[y] , identifier[self] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[w] )] keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[h] )] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[minenum] ): identifier[x] = identifier[random] . identifier[randint] ( literal[int] , identifier[w] - literal[int] ) identifier[y] = identifier[random] . identifier[randint] ( literal[int] , identifier[h] - literal[int] ) keyword[if] identifier[matrix] [ identifier[y] ][ identifier[x] ]. identifier[has_mine] : keyword[continue] identifier[self] . identifier[minecount] += literal[int] identifier[matrix] [ identifier[y] ][ identifier[x] ]. identifier[has_mine] = keyword[True] keyword[for] identifier[coord] keyword[in] [[- literal[int] ,- literal[int] ],[- literal[int] , literal[int] ],[- literal[int] , literal[int] ],[ literal[int] ,- literal[int] ],[ literal[int] , literal[int] ],[ literal[int] ,- literal[int] ],[ literal[int] , literal[int] ],[ literal[int] , literal[int] ]]: identifier[_x] , identifier[_y] = identifier[coord] keyword[if] keyword[not] identifier[self] . identifier[coord_in_map] ( identifier[x] + identifier[_x] , identifier[y] + identifier[_y] , identifier[w] , identifier[h] ): keyword[continue] identifier[matrix] [ identifier[y] + identifier[_y] ][ identifier[x] + identifier[_x] ]. identifier[add_nearest_mine] () keyword[return] identifier[matrix]
def build_mine_matrix(self, w, h, minenum): """random fill cells with mines and increments nearest mines num in adiacent cells""" self.minecount = 0 matrix = [[Cell(30, 30, x, y, self) for x in range(w)] for y in range(h)] for i in range(0, minenum): x = random.randint(0, w - 1) y = random.randint(0, h - 1) if matrix[y][x].has_mine: continue # depends on [control=['if'], data=[]] self.minecount += 1 matrix[y][x].has_mine = True for coord in [[-1, -1], [-1, 0], [-1, 1], [0, -1], [0, 1], [1, -1], [1, 0], [1, 1]]: (_x, _y) = coord if not self.coord_in_map(x + _x, y + _y, w, h): continue # depends on [control=['if'], data=[]] matrix[y + _y][x + _x].add_nearest_mine() # depends on [control=['for'], data=['coord']] # depends on [control=['for'], data=[]] return matrix
def read(*parts): """Reads the content of the file located at path created from *parts*.""" try: return io.open(os.path.join(*parts), 'r', encoding='utf-8').read() except IOError: return ''
def function[read, parameter[]]: constant[Reads the content of the file located at path created from *parts*.] <ast.Try object at 0x7da18eb56bc0>
keyword[def] identifier[read] (* identifier[parts] ): literal[string] keyword[try] : keyword[return] identifier[io] . identifier[open] ( identifier[os] . identifier[path] . identifier[join] (* identifier[parts] ), literal[string] , identifier[encoding] = literal[string] ). identifier[read] () keyword[except] identifier[IOError] : keyword[return] literal[string]
def read(*parts): """Reads the content of the file located at path created from *parts*.""" try: return io.open(os.path.join(*parts), 'r', encoding='utf-8').read() # depends on [control=['try'], data=[]] except IOError: return '' # depends on [control=['except'], data=[]]
def get_sorted_nts_omit_section(self, hdrgo_prt, hdrgo_sort): """Return a flat list of sections (wo/section names) with GO terms grouped and sorted.""" nts_flat = [] # print("SSSS SorterNts:get_sorted_nts_omit_section(hdrgo_prt={}, hdrgo_sort={})".format( # hdrgo_prt, hdrgo_sort)) hdrgos_seen = set() hdrgos_actual = self.sortgos.grprobj.get_hdrgos() for _, section_hdrgos_all in self.sections: #section_hdrgos_act = set(section_hdrgos_all).intersection(hdrgos_actual) section_hdrgos_act = [h for h in section_hdrgos_all if h in hdrgos_actual] hdrgos_seen |= set(section_hdrgos_act) self.sortgos.get_sorted_hdrgo2usrgos( section_hdrgos_act, nts_flat, hdrgo_prt, hdrgo_sort) remaining_hdrgos = set(self.sortgos.grprobj.get_hdrgos()).difference(hdrgos_seen) self.sortgos.get_sorted_hdrgo2usrgos(remaining_hdrgos, nts_flat, hdrgo_prt, hdrgo_sort) return nts_flat
def function[get_sorted_nts_omit_section, parameter[self, hdrgo_prt, hdrgo_sort]]: constant[Return a flat list of sections (wo/section names) with GO terms grouped and sorted.] variable[nts_flat] assign[=] list[[]] variable[hdrgos_seen] assign[=] call[name[set], parameter[]] variable[hdrgos_actual] assign[=] call[name[self].sortgos.grprobj.get_hdrgos, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b26acd90>, <ast.Name object at 0x7da1b26af430>]]] in starred[name[self].sections] begin[:] variable[section_hdrgos_act] assign[=] <ast.ListComp object at 0x7da1b26ae4d0> <ast.AugAssign object at 0x7da1b26af640> call[name[self].sortgos.get_sorted_hdrgo2usrgos, parameter[name[section_hdrgos_act], name[nts_flat], name[hdrgo_prt], name[hdrgo_sort]]] variable[remaining_hdrgos] assign[=] call[call[name[set], parameter[call[name[self].sortgos.grprobj.get_hdrgos, parameter[]]]].difference, parameter[name[hdrgos_seen]]] call[name[self].sortgos.get_sorted_hdrgo2usrgos, parameter[name[remaining_hdrgos], name[nts_flat], name[hdrgo_prt], name[hdrgo_sort]]] return[name[nts_flat]]
keyword[def] identifier[get_sorted_nts_omit_section] ( identifier[self] , identifier[hdrgo_prt] , identifier[hdrgo_sort] ): literal[string] identifier[nts_flat] =[] identifier[hdrgos_seen] = identifier[set] () identifier[hdrgos_actual] = identifier[self] . identifier[sortgos] . identifier[grprobj] . identifier[get_hdrgos] () keyword[for] identifier[_] , identifier[section_hdrgos_all] keyword[in] identifier[self] . identifier[sections] : identifier[section_hdrgos_act] =[ identifier[h] keyword[for] identifier[h] keyword[in] identifier[section_hdrgos_all] keyword[if] identifier[h] keyword[in] identifier[hdrgos_actual] ] identifier[hdrgos_seen] |= identifier[set] ( identifier[section_hdrgos_act] ) identifier[self] . identifier[sortgos] . identifier[get_sorted_hdrgo2usrgos] ( identifier[section_hdrgos_act] , identifier[nts_flat] , identifier[hdrgo_prt] , identifier[hdrgo_sort] ) identifier[remaining_hdrgos] = identifier[set] ( identifier[self] . identifier[sortgos] . identifier[grprobj] . identifier[get_hdrgos] ()). identifier[difference] ( identifier[hdrgos_seen] ) identifier[self] . identifier[sortgos] . identifier[get_sorted_hdrgo2usrgos] ( identifier[remaining_hdrgos] , identifier[nts_flat] , identifier[hdrgo_prt] , identifier[hdrgo_sort] ) keyword[return] identifier[nts_flat]
def get_sorted_nts_omit_section(self, hdrgo_prt, hdrgo_sort): """Return a flat list of sections (wo/section names) with GO terms grouped and sorted.""" nts_flat = [] # print("SSSS SorterNts:get_sorted_nts_omit_section(hdrgo_prt={}, hdrgo_sort={})".format( # hdrgo_prt, hdrgo_sort)) hdrgos_seen = set() hdrgos_actual = self.sortgos.grprobj.get_hdrgos() for (_, section_hdrgos_all) in self.sections: #section_hdrgos_act = set(section_hdrgos_all).intersection(hdrgos_actual) section_hdrgos_act = [h for h in section_hdrgos_all if h in hdrgos_actual] hdrgos_seen |= set(section_hdrgos_act) self.sortgos.get_sorted_hdrgo2usrgos(section_hdrgos_act, nts_flat, hdrgo_prt, hdrgo_sort) # depends on [control=['for'], data=[]] remaining_hdrgos = set(self.sortgos.grprobj.get_hdrgos()).difference(hdrgos_seen) self.sortgos.get_sorted_hdrgo2usrgos(remaining_hdrgos, nts_flat, hdrgo_prt, hdrgo_sort) return nts_flat
def deleteEndpoint(self,ep,cbfn=""): ''' Send DELETE message to an endpoint. :param str ep: name of endpoint :param fnptr cbfn: Optional - callback funtion to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' result = asyncResult(callback=cbfn) result.endpoint = ep data = self._deleteURL("/endpoints/"+ep) if data.status_code == 200: #immediate success result.error = False result.is_done = True elif data.status_code == 202: self.database['async-responses'][json.loads(data.content)["async-response-id"]]= result else: result.error = response_codes("resource",data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result
def function[deleteEndpoint, parameter[self, ep, cbfn]]: constant[ Send DELETE message to an endpoint. :param str ep: name of endpoint :param fnptr cbfn: Optional - callback funtion to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ] variable[result] assign[=] call[name[asyncResult], parameter[]] name[result].endpoint assign[=] name[ep] variable[data] assign[=] call[name[self]._deleteURL, parameter[binary_operation[constant[/endpoints/] + name[ep]]]] if compare[name[data].status_code equal[==] constant[200]] begin[:] name[result].error assign[=] constant[False] name[result].is_done assign[=] constant[True] name[result].raw_data assign[=] name[data].content name[result].status_code assign[=] name[data].status_code return[name[result]]
keyword[def] identifier[deleteEndpoint] ( identifier[self] , identifier[ep] , identifier[cbfn] = literal[string] ): literal[string] identifier[result] = identifier[asyncResult] ( identifier[callback] = identifier[cbfn] ) identifier[result] . identifier[endpoint] = identifier[ep] identifier[data] = identifier[self] . identifier[_deleteURL] ( literal[string] + identifier[ep] ) keyword[if] identifier[data] . identifier[status_code] == literal[int] : identifier[result] . identifier[error] = keyword[False] identifier[result] . identifier[is_done] = keyword[True] keyword[elif] identifier[data] . identifier[status_code] == literal[int] : identifier[self] . identifier[database] [ literal[string] ][ identifier[json] . identifier[loads] ( identifier[data] . identifier[content] )[ literal[string] ]]= identifier[result] keyword[else] : identifier[result] . identifier[error] = identifier[response_codes] ( literal[string] , identifier[data] . identifier[status_code] ) identifier[result] . identifier[is_done] = keyword[True] identifier[result] . identifier[raw_data] = identifier[data] . identifier[content] identifier[result] . identifier[status_code] = identifier[data] . identifier[status_code] keyword[return] identifier[result]
def deleteEndpoint(self, ep, cbfn=''): """ Send DELETE message to an endpoint. :param str ep: name of endpoint :param fnptr cbfn: Optional - callback funtion to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult """ result = asyncResult(callback=cbfn) result.endpoint = ep data = self._deleteURL('/endpoints/' + ep) if data.status_code == 200: #immediate success result.error = False result.is_done = True # depends on [control=['if'], data=[]] elif data.status_code == 202: self.database['async-responses'][json.loads(data.content)['async-response-id']] = result # depends on [control=['if'], data=[]] else: result.error = response_codes('resource', data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result
def receive_empty(self, empty, transaction):
    """
    Manage the observe feature to remove a client in case of a RST message
    received in reply to a notification.

    :type empty: Message
    :param empty: the received message
    :type transaction: Transaction
    :param transaction: the transaction that owns the notification message
    :rtype : Transaction
    :return: the modified transaction
    """
    if empty.type == defines.Types["RST"]:
        # An RST in response to a notification means the observer is gone
        # (per the CoAP Observe pattern): drop its subscription.
        host, port = transaction.request.source
        # Subscriptions are keyed by hash(host + port + token) -- must match
        # the key scheme used when the relation was registered.
        key_token = hash(str(host) + str(port) + str(transaction.request.token))
        logger.info("Remove Subscriber")
        try:
            del self._relations[key_token]
        except KeyError:
            # Already removed (or never registered); RST handling is best-effort.
            pass
        transaction.completed = True
    return transaction
def function[receive_empty, parameter[self, empty, transaction]]: constant[ Manage the observe feature to remove a client in case of a RST message receveide in reply to a notification. :type empty: Message :param empty: the received message :type transaction: Transaction :param transaction: the transaction that owns the notification message :rtype : Transaction :return: the modified transaction ] if compare[name[empty].type equal[==] call[name[defines].Types][constant[RST]]] begin[:] <ast.Tuple object at 0x7da20c6a80a0> assign[=] name[transaction].request.source variable[key_token] assign[=] call[name[hash], parameter[binary_operation[binary_operation[call[name[str], parameter[name[host]]] + call[name[str], parameter[name[port]]]] + call[name[str], parameter[name[transaction].request.token]]]]] call[name[logger].info, parameter[constant[Remove Subscriber]]] <ast.Try object at 0x7da20c6ab6a0> name[transaction].completed assign[=] constant[True] return[name[transaction]]
keyword[def] identifier[receive_empty] ( identifier[self] , identifier[empty] , identifier[transaction] ): literal[string] keyword[if] identifier[empty] . identifier[type] == identifier[defines] . identifier[Types] [ literal[string] ]: identifier[host] , identifier[port] = identifier[transaction] . identifier[request] . identifier[source] identifier[key_token] = identifier[hash] ( identifier[str] ( identifier[host] )+ identifier[str] ( identifier[port] )+ identifier[str] ( identifier[transaction] . identifier[request] . identifier[token] )) identifier[logger] . identifier[info] ( literal[string] ) keyword[try] : keyword[del] identifier[self] . identifier[_relations] [ identifier[key_token] ] keyword[except] identifier[KeyError] : keyword[pass] identifier[transaction] . identifier[completed] = keyword[True] keyword[return] identifier[transaction]
def receive_empty(self, empty, transaction): """ Manage the observe feature to remove a client in case of a RST message receveide in reply to a notification. :type empty: Message :param empty: the received message :type transaction: Transaction :param transaction: the transaction that owns the notification message :rtype : Transaction :return: the modified transaction """ if empty.type == defines.Types['RST']: (host, port) = transaction.request.source key_token = hash(str(host) + str(port) + str(transaction.request.token)) logger.info('Remove Subscriber') try: del self._relations[key_token] # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] transaction.completed = True # depends on [control=['if'], data=[]] return transaction