code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def set_fixed_image(self, image): """ Set Fixed ANTsImage for metric """ if not isinstance(image, iio.ANTsImage): raise ValueError('image must be ANTsImage type') if image.dimension != self.dimension: raise ValueError('image dim (%i) does not match metric dim (%i)' % (image.dimension, self.dimension)) self._metric.setFixedImage(image.pointer, False) self.fixed_image = image
def function[set_fixed_image, parameter[self, image]]: constant[ Set Fixed ANTsImage for metric ] if <ast.UnaryOp object at 0x7da20c6a8b80> begin[:] <ast.Raise object at 0x7da20c6a86d0> if compare[name[image].dimension not_equal[!=] name[self].dimension] begin[:] <ast.Raise object at 0x7da1b155d570> call[name[self]._metric.setFixedImage, parameter[name[image].pointer, constant[False]]] name[self].fixed_image assign[=] name[image]
keyword[def] identifier[set_fixed_image] ( identifier[self] , identifier[image] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[image] , identifier[iio] . identifier[ANTsImage] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[image] . identifier[dimension] != identifier[self] . identifier[dimension] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[image] . identifier[dimension] , identifier[self] . identifier[dimension] )) identifier[self] . identifier[_metric] . identifier[setFixedImage] ( identifier[image] . identifier[pointer] , keyword[False] ) identifier[self] . identifier[fixed_image] = identifier[image]
def set_fixed_image(self, image): """ Set Fixed ANTsImage for metric """ if not isinstance(image, iio.ANTsImage): raise ValueError('image must be ANTsImage type') # depends on [control=['if'], data=[]] if image.dimension != self.dimension: raise ValueError('image dim (%i) does not match metric dim (%i)' % (image.dimension, self.dimension)) # depends on [control=['if'], data=[]] self._metric.setFixedImage(image.pointer, False) self.fixed_image = image
async def _write_link_secret_label(self, label) -> None: """ Update non-secret storage record with link secret label. :param label: link secret label """ LOGGER.debug('Wallet._write_link_secret_label <<< %s', label) if await self.get_link_secret_label() == label: LOGGER.info('Wallet._write_link_secret_label abstaining - already current') else: await self.write_non_secret(StorageRecord( TYPE_LINK_SECRET_LABEL, label, tags=None, ident=str(int(time())))) # indy requires str LOGGER.debug('Wallet._write_link_secret_label <<<')
<ast.AsyncFunctionDef object at 0x7da20e9b0490>
keyword[async] keyword[def] identifier[_write_link_secret_label] ( identifier[self] , identifier[label] )-> keyword[None] : literal[string] identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[label] ) keyword[if] keyword[await] identifier[self] . identifier[get_link_secret_label] ()== identifier[label] : identifier[LOGGER] . identifier[info] ( literal[string] ) keyword[else] : keyword[await] identifier[self] . identifier[write_non_secret] ( identifier[StorageRecord] ( identifier[TYPE_LINK_SECRET_LABEL] , identifier[label] , identifier[tags] = keyword[None] , identifier[ident] = identifier[str] ( identifier[int] ( identifier[time] ())))) identifier[LOGGER] . identifier[debug] ( literal[string] )
async def _write_link_secret_label(self, label) -> None: """ Update non-secret storage record with link secret label. :param label: link secret label """ LOGGER.debug('Wallet._write_link_secret_label <<< %s', label) if await self.get_link_secret_label() == label: LOGGER.info('Wallet._write_link_secret_label abstaining - already current') # depends on [control=['if'], data=[]] else: await self.write_non_secret(StorageRecord(TYPE_LINK_SECRET_LABEL, label, tags=None, ident=str(int(time())))) # indy requires str LOGGER.debug('Wallet._write_link_secret_label <<<')
def update(self, auth_payload=values.unset): """ Update the ChallengeInstance :param unicode auth_payload: Optional payload to verify the Challenge :returns: Updated ChallengeInstance :rtype: twilio.rest.authy.v1.service.entity.factor.challenge.ChallengeInstance """ return self._proxy.update(auth_payload=auth_payload, )
def function[update, parameter[self, auth_payload]]: constant[ Update the ChallengeInstance :param unicode auth_payload: Optional payload to verify the Challenge :returns: Updated ChallengeInstance :rtype: twilio.rest.authy.v1.service.entity.factor.challenge.ChallengeInstance ] return[call[name[self]._proxy.update, parameter[]]]
keyword[def] identifier[update] ( identifier[self] , identifier[auth_payload] = identifier[values] . identifier[unset] ): literal[string] keyword[return] identifier[self] . identifier[_proxy] . identifier[update] ( identifier[auth_payload] = identifier[auth_payload] ,)
def update(self, auth_payload=values.unset): """ Update the ChallengeInstance :param unicode auth_payload: Optional payload to verify the Challenge :returns: Updated ChallengeInstance :rtype: twilio.rest.authy.v1.service.entity.factor.challenge.ChallengeInstance """ return self._proxy.update(auth_payload=auth_payload)
def handle_template(self, template, subdir): """ Use yacms's project template by default. The method of picking the default directory is copied from Django's TemplateCommand. """ if template is None: return six.text_type(os.path.join(yacms.__path__[0], subdir)) return super(Command, self).handle_template(template, subdir)
def function[handle_template, parameter[self, template, subdir]]: constant[ Use yacms's project template by default. The method of picking the default directory is copied from Django's TemplateCommand. ] if compare[name[template] is constant[None]] begin[:] return[call[name[six].text_type, parameter[call[name[os].path.join, parameter[call[name[yacms].__path__][constant[0]], name[subdir]]]]]] return[call[call[name[super], parameter[name[Command], name[self]]].handle_template, parameter[name[template], name[subdir]]]]
keyword[def] identifier[handle_template] ( identifier[self] , identifier[template] , identifier[subdir] ): literal[string] keyword[if] identifier[template] keyword[is] keyword[None] : keyword[return] identifier[six] . identifier[text_type] ( identifier[os] . identifier[path] . identifier[join] ( identifier[yacms] . identifier[__path__] [ literal[int] ], identifier[subdir] )) keyword[return] identifier[super] ( identifier[Command] , identifier[self] ). identifier[handle_template] ( identifier[template] , identifier[subdir] )
def handle_template(self, template, subdir): """ Use yacms's project template by default. The method of picking the default directory is copied from Django's TemplateCommand. """ if template is None: return six.text_type(os.path.join(yacms.__path__[0], subdir)) # depends on [control=['if'], data=[]] return super(Command, self).handle_template(template, subdir)
def error_response(response): """ Raises errors matching the response code """ if response.status_code >= 500: raise exceptions.GeocodioServerError elif response.status_code == 403: raise exceptions.GeocodioAuthError elif response.status_code == 422: raise exceptions.GeocodioDataError(response.json()["error"]) else: raise exceptions.GeocodioError( "Unknown service error (HTTP {0})".format(response.status_code) )
def function[error_response, parameter[response]]: constant[ Raises errors matching the response code ] if compare[name[response].status_code greater_or_equal[>=] constant[500]] begin[:] <ast.Raise object at 0x7da1b1b9d030>
keyword[def] identifier[error_response] ( identifier[response] ): literal[string] keyword[if] identifier[response] . identifier[status_code] >= literal[int] : keyword[raise] identifier[exceptions] . identifier[GeocodioServerError] keyword[elif] identifier[response] . identifier[status_code] == literal[int] : keyword[raise] identifier[exceptions] . identifier[GeocodioAuthError] keyword[elif] identifier[response] . identifier[status_code] == literal[int] : keyword[raise] identifier[exceptions] . identifier[GeocodioDataError] ( identifier[response] . identifier[json] ()[ literal[string] ]) keyword[else] : keyword[raise] identifier[exceptions] . identifier[GeocodioError] ( literal[string] . identifier[format] ( identifier[response] . identifier[status_code] ) )
def error_response(response): """ Raises errors matching the response code """ if response.status_code >= 500: raise exceptions.GeocodioServerError # depends on [control=['if'], data=[]] elif response.status_code == 403: raise exceptions.GeocodioAuthError # depends on [control=['if'], data=[]] elif response.status_code == 422: raise exceptions.GeocodioDataError(response.json()['error']) # depends on [control=['if'], data=[]] else: raise exceptions.GeocodioError('Unknown service error (HTTP {0})'.format(response.status_code))
def Contradiction(expr1: Expression, expr2: Expression) -> Expression: """Return expression which is the contradiction of `expr1` and `expr2`.""" expr = Disjunction(Conjunction(expr1, Negation(expr2)), Conjunction(Negation(expr1), expr2)) return ast.fix_missing_locations(expr)
def function[Contradiction, parameter[expr1, expr2]]: constant[Return expression which is the contradiction of `expr1` and `expr2`.] variable[expr] assign[=] call[name[Disjunction], parameter[call[name[Conjunction], parameter[name[expr1], call[name[Negation], parameter[name[expr2]]]]], call[name[Conjunction], parameter[call[name[Negation], parameter[name[expr1]]], name[expr2]]]]] return[call[name[ast].fix_missing_locations, parameter[name[expr]]]]
keyword[def] identifier[Contradiction] ( identifier[expr1] : identifier[Expression] , identifier[expr2] : identifier[Expression] )-> identifier[Expression] : literal[string] identifier[expr] = identifier[Disjunction] ( identifier[Conjunction] ( identifier[expr1] , identifier[Negation] ( identifier[expr2] )), identifier[Conjunction] ( identifier[Negation] ( identifier[expr1] ), identifier[expr2] )) keyword[return] identifier[ast] . identifier[fix_missing_locations] ( identifier[expr] )
def Contradiction(expr1: Expression, expr2: Expression) -> Expression: """Return expression which is the contradiction of `expr1` and `expr2`.""" expr = Disjunction(Conjunction(expr1, Negation(expr2)), Conjunction(Negation(expr1), expr2)) return ast.fix_missing_locations(expr)
def to_array(self): """ Serializes this InlineQueryResultAudio to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(InlineQueryResultAudio, self).to_array() # 'type' and 'id' given by superclass array['audio_url'] = u(self.audio_url) # py2: type unicode, py3: type str array['title'] = u(self.title) # py2: type unicode, py3: type str if self.caption is not None: array['caption'] = u(self.caption) # py2: type unicode, py3: type str if self.parse_mode is not None: array['parse_mode'] = u(self.parse_mode) # py2: type unicode, py3: type str if self.performer is not None: array['performer'] = u(self.performer) # py2: type unicode, py3: type str if self.audio_duration is not None: array['audio_duration'] = int(self.audio_duration) # type int if self.reply_markup is not None: array['reply_markup'] = self.reply_markup.to_array() # type InlineKeyboardMarkup if self.input_message_content is not None: array['input_message_content'] = self.input_message_content.to_array() # type InputMessageContent return array
def function[to_array, parameter[self]]: constant[ Serializes this InlineQueryResultAudio to a dictionary. :return: dictionary representation of this object. :rtype: dict ] variable[array] assign[=] call[call[name[super], parameter[name[InlineQueryResultAudio], name[self]]].to_array, parameter[]] call[name[array]][constant[audio_url]] assign[=] call[name[u], parameter[name[self].audio_url]] call[name[array]][constant[title]] assign[=] call[name[u], parameter[name[self].title]] if compare[name[self].caption is_not constant[None]] begin[:] call[name[array]][constant[caption]] assign[=] call[name[u], parameter[name[self].caption]] if compare[name[self].parse_mode is_not constant[None]] begin[:] call[name[array]][constant[parse_mode]] assign[=] call[name[u], parameter[name[self].parse_mode]] if compare[name[self].performer is_not constant[None]] begin[:] call[name[array]][constant[performer]] assign[=] call[name[u], parameter[name[self].performer]] if compare[name[self].audio_duration is_not constant[None]] begin[:] call[name[array]][constant[audio_duration]] assign[=] call[name[int], parameter[name[self].audio_duration]] if compare[name[self].reply_markup is_not constant[None]] begin[:] call[name[array]][constant[reply_markup]] assign[=] call[name[self].reply_markup.to_array, parameter[]] if compare[name[self].input_message_content is_not constant[None]] begin[:] call[name[array]][constant[input_message_content]] assign[=] call[name[self].input_message_content.to_array, parameter[]] return[name[array]]
keyword[def] identifier[to_array] ( identifier[self] ): literal[string] identifier[array] = identifier[super] ( identifier[InlineQueryResultAudio] , identifier[self] ). identifier[to_array] () identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[audio_url] ) identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[title] ) keyword[if] identifier[self] . identifier[caption] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[caption] ) keyword[if] identifier[self] . identifier[parse_mode] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[parse_mode] ) keyword[if] identifier[self] . identifier[performer] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[performer] ) keyword[if] identifier[self] . identifier[audio_duration] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[int] ( identifier[self] . identifier[audio_duration] ) keyword[if] identifier[self] . identifier[reply_markup] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[self] . identifier[reply_markup] . identifier[to_array] () keyword[if] identifier[self] . identifier[input_message_content] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[self] . identifier[input_message_content] . identifier[to_array] () keyword[return] identifier[array]
def to_array(self): """ Serializes this InlineQueryResultAudio to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(InlineQueryResultAudio, self).to_array() # 'type' and 'id' given by superclass array['audio_url'] = u(self.audio_url) # py2: type unicode, py3: type str array['title'] = u(self.title) # py2: type unicode, py3: type str if self.caption is not None: array['caption'] = u(self.caption) # py2: type unicode, py3: type str # depends on [control=['if'], data=[]] if self.parse_mode is not None: array['parse_mode'] = u(self.parse_mode) # py2: type unicode, py3: type str # depends on [control=['if'], data=[]] if self.performer is not None: array['performer'] = u(self.performer) # py2: type unicode, py3: type str # depends on [control=['if'], data=[]] if self.audio_duration is not None: array['audio_duration'] = int(self.audio_duration) # type int # depends on [control=['if'], data=[]] if self.reply_markup is not None: array['reply_markup'] = self.reply_markup.to_array() # type InlineKeyboardMarkup # depends on [control=['if'], data=[]] if self.input_message_content is not None: array['input_message_content'] = self.input_message_content.to_array() # type InputMessageContent # depends on [control=['if'], data=[]] return array
def _install_iana_config(cls): """ Download `iana-domains-db.json` if not present. """ # We initiate the link to the iana configuration. # It is not hard coded because this method is called only if we # are sure that the configuration file exist. iana_link = PyFunceble.CONFIGURATION["links"]["iana"] # We update the link according to our current version. iana_link = Version(True).right_url_from_version(iana_link) # We set the destination of the downloaded file. destination = PyFunceble.CURRENT_DIRECTORY + "iana-domains-db.json" if not Version(True).is_cloned() or not PyFunceble.path.isfile(destination): # The current version is not the cloned version. # We Download the link content and return the download status. return Download(iana_link, destination).text() # We are in the cloned version. # We do not need to download the file, so we are returning None. return None
def function[_install_iana_config, parameter[cls]]: constant[ Download `iana-domains-db.json` if not present. ] variable[iana_link] assign[=] call[call[name[PyFunceble].CONFIGURATION][constant[links]]][constant[iana]] variable[iana_link] assign[=] call[call[name[Version], parameter[constant[True]]].right_url_from_version, parameter[name[iana_link]]] variable[destination] assign[=] binary_operation[name[PyFunceble].CURRENT_DIRECTORY + constant[iana-domains-db.json]] if <ast.BoolOp object at 0x7da20e954c70> begin[:] return[call[call[name[Download], parameter[name[iana_link], name[destination]]].text, parameter[]]] return[constant[None]]
keyword[def] identifier[_install_iana_config] ( identifier[cls] ): literal[string] identifier[iana_link] = identifier[PyFunceble] . identifier[CONFIGURATION] [ literal[string] ][ literal[string] ] identifier[iana_link] = identifier[Version] ( keyword[True] ). identifier[right_url_from_version] ( identifier[iana_link] ) identifier[destination] = identifier[PyFunceble] . identifier[CURRENT_DIRECTORY] + literal[string] keyword[if] keyword[not] identifier[Version] ( keyword[True] ). identifier[is_cloned] () keyword[or] keyword[not] identifier[PyFunceble] . identifier[path] . identifier[isfile] ( identifier[destination] ): keyword[return] identifier[Download] ( identifier[iana_link] , identifier[destination] ). identifier[text] () keyword[return] keyword[None]
def _install_iana_config(cls): """ Download `iana-domains-db.json` if not present. """ # We initiate the link to the iana configuration. # It is not hard coded because this method is called only if we # are sure that the configuration file exist. iana_link = PyFunceble.CONFIGURATION['links']['iana'] # We update the link according to our current version. iana_link = Version(True).right_url_from_version(iana_link) # We set the destination of the downloaded file. destination = PyFunceble.CURRENT_DIRECTORY + 'iana-domains-db.json' if not Version(True).is_cloned() or not PyFunceble.path.isfile(destination): # The current version is not the cloned version. # We Download the link content and return the download status. return Download(iana_link, destination).text() # depends on [control=['if'], data=[]] # We are in the cloned version. # We do not need to download the file, so we are returning None. return None
def copy(self): """Return a shallow copy of the instance.""" rv = self.__class__(self.capacity) rv._mapping.update(self._mapping) rv._queue = deque(self._queue) return rv
def function[copy, parameter[self]]: constant[Return a shallow copy of the instance.] variable[rv] assign[=] call[name[self].__class__, parameter[name[self].capacity]] call[name[rv]._mapping.update, parameter[name[self]._mapping]] name[rv]._queue assign[=] call[name[deque], parameter[name[self]._queue]] return[name[rv]]
keyword[def] identifier[copy] ( identifier[self] ): literal[string] identifier[rv] = identifier[self] . identifier[__class__] ( identifier[self] . identifier[capacity] ) identifier[rv] . identifier[_mapping] . identifier[update] ( identifier[self] . identifier[_mapping] ) identifier[rv] . identifier[_queue] = identifier[deque] ( identifier[self] . identifier[_queue] ) keyword[return] identifier[rv]
def copy(self): """Return a shallow copy of the instance.""" rv = self.__class__(self.capacity) rv._mapping.update(self._mapping) rv._queue = deque(self._queue) return rv
def write_terminal_win(matrix, version, border=None): # pragma: no cover """\ Function to write a QR Code to a MS Windows terminal. :param matrix: The matrix to serialize. :param int version: The (Micro) QR code version :param int border: Integer indicating the size of the quiet zone. If set to ``None`` (default), the recommended border size will be used (``4`` for QR Codes, ``2`` for a Micro QR Codes). """ import sys import struct import ctypes write = sys.stdout.write std_out = ctypes.windll.kernel32.GetStdHandle(-11) csbi = ctypes.create_string_buffer(22) res = ctypes.windll.kernel32.GetConsoleScreenBufferInfo(std_out, csbi) if not res: raise OSError('Cannot find information about the console. ' 'Not running on the command line?') default_color = struct.unpack(b'hhhhHhhhhhh', csbi.raw)[4] set_color = partial(ctypes.windll.kernel32.SetConsoleTextAttribute, std_out) colours = (240, default_color) for row in matrix_iter(matrix, version, scale=1, border=border): prev_bit = -1 cnt = 0 for bit in row: if bit == prev_bit: cnt += 1 else: if cnt: set_color(colours[prev_bit]) write(' ' * cnt) prev_bit = bit cnt = 1 if cnt: set_color(colours[prev_bit]) write(' ' * cnt) set_color(default_color) # reset color write('\n')
def function[write_terminal_win, parameter[matrix, version, border]]: constant[ Function to write a QR Code to a MS Windows terminal. :param matrix: The matrix to serialize. :param int version: The (Micro) QR code version :param int border: Integer indicating the size of the quiet zone. If set to ``None`` (default), the recommended border size will be used (``4`` for QR Codes, ``2`` for a Micro QR Codes). ] import module[sys] import module[struct] import module[ctypes] variable[write] assign[=] name[sys].stdout.write variable[std_out] assign[=] call[name[ctypes].windll.kernel32.GetStdHandle, parameter[<ast.UnaryOp object at 0x7da1b2346b30>]] variable[csbi] assign[=] call[name[ctypes].create_string_buffer, parameter[constant[22]]] variable[res] assign[=] call[name[ctypes].windll.kernel32.GetConsoleScreenBufferInfo, parameter[name[std_out], name[csbi]]] if <ast.UnaryOp object at 0x7da1b2346b00> begin[:] <ast.Raise object at 0x7da1b2345810> variable[default_color] assign[=] call[call[name[struct].unpack, parameter[constant[b'hhhhHhhhhhh'], name[csbi].raw]]][constant[4]] variable[set_color] assign[=] call[name[partial], parameter[name[ctypes].windll.kernel32.SetConsoleTextAttribute, name[std_out]]] variable[colours] assign[=] tuple[[<ast.Constant object at 0x7da1b2347190>, <ast.Name object at 0x7da1b2344e50>]] for taget[name[row]] in starred[call[name[matrix_iter], parameter[name[matrix], name[version]]]] begin[:] variable[prev_bit] assign[=] <ast.UnaryOp object at 0x7da1b2347490> variable[cnt] assign[=] constant[0] for taget[name[bit]] in starred[name[row]] begin[:] if compare[name[bit] equal[==] name[prev_bit]] begin[:] <ast.AugAssign object at 0x7da1b2345b70> if name[cnt] begin[:] call[name[set_color], parameter[call[name[colours]][name[prev_bit]]]] call[name[write], parameter[binary_operation[constant[ ] * name[cnt]]]] call[name[set_color], parameter[name[default_color]]] call[name[write], parameter[constant[ ]]]
keyword[def] identifier[write_terminal_win] ( identifier[matrix] , identifier[version] , identifier[border] = keyword[None] ): literal[string] keyword[import] identifier[sys] keyword[import] identifier[struct] keyword[import] identifier[ctypes] identifier[write] = identifier[sys] . identifier[stdout] . identifier[write] identifier[std_out] = identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[GetStdHandle] (- literal[int] ) identifier[csbi] = identifier[ctypes] . identifier[create_string_buffer] ( literal[int] ) identifier[res] = identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[GetConsoleScreenBufferInfo] ( identifier[std_out] , identifier[csbi] ) keyword[if] keyword[not] identifier[res] : keyword[raise] identifier[OSError] ( literal[string] literal[string] ) identifier[default_color] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[csbi] . identifier[raw] )[ literal[int] ] identifier[set_color] = identifier[partial] ( identifier[ctypes] . identifier[windll] . identifier[kernel32] . identifier[SetConsoleTextAttribute] , identifier[std_out] ) identifier[colours] =( literal[int] , identifier[default_color] ) keyword[for] identifier[row] keyword[in] identifier[matrix_iter] ( identifier[matrix] , identifier[version] , identifier[scale] = literal[int] , identifier[border] = identifier[border] ): identifier[prev_bit] =- literal[int] identifier[cnt] = literal[int] keyword[for] identifier[bit] keyword[in] identifier[row] : keyword[if] identifier[bit] == identifier[prev_bit] : identifier[cnt] += literal[int] keyword[else] : keyword[if] identifier[cnt] : identifier[set_color] ( identifier[colours] [ identifier[prev_bit] ]) identifier[write] ( literal[string] * identifier[cnt] ) identifier[prev_bit] = identifier[bit] identifier[cnt] = literal[int] keyword[if] identifier[cnt] : identifier[set_color] ( identifier[colours] [ identifier[prev_bit] ]) identifier[write] ( literal[string] * identifier[cnt] ) identifier[set_color] ( identifier[default_color] ) identifier[write] ( literal[string] )
def write_terminal_win(matrix, version, border=None): # pragma: no cover ' Function to write a QR Code to a MS Windows terminal.\n\n :param matrix: The matrix to serialize.\n :param int version: The (Micro) QR code version\n :param int border: Integer indicating the size of the quiet zone.\n If set to ``None`` (default), the recommended border size\n will be used (``4`` for QR Codes, ``2`` for a Micro QR Codes).\n ' import sys import struct import ctypes write = sys.stdout.write std_out = ctypes.windll.kernel32.GetStdHandle(-11) csbi = ctypes.create_string_buffer(22) res = ctypes.windll.kernel32.GetConsoleScreenBufferInfo(std_out, csbi) if not res: raise OSError('Cannot find information about the console. Not running on the command line?') # depends on [control=['if'], data=[]] default_color = struct.unpack(b'hhhhHhhhhhh', csbi.raw)[4] set_color = partial(ctypes.windll.kernel32.SetConsoleTextAttribute, std_out) colours = (240, default_color) for row in matrix_iter(matrix, version, scale=1, border=border): prev_bit = -1 cnt = 0 for bit in row: if bit == prev_bit: cnt += 1 # depends on [control=['if'], data=[]] else: if cnt: set_color(colours[prev_bit]) write(' ' * cnt) # depends on [control=['if'], data=[]] prev_bit = bit cnt = 1 # depends on [control=['for'], data=['bit']] if cnt: set_color(colours[prev_bit]) write(' ' * cnt) # depends on [control=['if'], data=[]] set_color(default_color) # reset color write('\n') # depends on [control=['for'], data=['row']]
def declare_queue(self, queue_name): """Declare a queue. Has no effect if a queue with the given name already exists. Parameters: queue_name(str): The name of the new queue. Raises: ConnectionClosed: If the underlying channel or connection has been closed. """ attempts = 1 while True: try: if queue_name not in self.queues: self.emit_before("declare_queue", queue_name) self._declare_queue(queue_name) self.queues.add(queue_name) self.emit_after("declare_queue", queue_name) delayed_name = dq_name(queue_name) self._declare_dq_queue(queue_name) self.delay_queues.add(delayed_name) self.emit_after("declare_delay_queue", delayed_name) self._declare_xq_queue(queue_name) break except (pika.exceptions.AMQPConnectionError, pika.exceptions.AMQPChannelError) as e: # pragma: no cover # Delete the channel and the connection so that the next # caller may initiate new ones of each. del self.channel del self.connection attempts += 1 if attempts > MAX_DECLARE_ATTEMPTS: raise ConnectionClosed(e) from None self.logger.debug( "Retrying declare due to closed connection. [%d/%d]", attempts, MAX_DECLARE_ATTEMPTS, )
def function[declare_queue, parameter[self, queue_name]]: constant[Declare a queue. Has no effect if a queue with the given name already exists. Parameters: queue_name(str): The name of the new queue. Raises: ConnectionClosed: If the underlying channel or connection has been closed. ] variable[attempts] assign[=] constant[1] while constant[True] begin[:] <ast.Try object at 0x7da1b1896740>
keyword[def] identifier[declare_queue] ( identifier[self] , identifier[queue_name] ): literal[string] identifier[attempts] = literal[int] keyword[while] keyword[True] : keyword[try] : keyword[if] identifier[queue_name] keyword[not] keyword[in] identifier[self] . identifier[queues] : identifier[self] . identifier[emit_before] ( literal[string] , identifier[queue_name] ) identifier[self] . identifier[_declare_queue] ( identifier[queue_name] ) identifier[self] . identifier[queues] . identifier[add] ( identifier[queue_name] ) identifier[self] . identifier[emit_after] ( literal[string] , identifier[queue_name] ) identifier[delayed_name] = identifier[dq_name] ( identifier[queue_name] ) identifier[self] . identifier[_declare_dq_queue] ( identifier[queue_name] ) identifier[self] . identifier[delay_queues] . identifier[add] ( identifier[delayed_name] ) identifier[self] . identifier[emit_after] ( literal[string] , identifier[delayed_name] ) identifier[self] . identifier[_declare_xq_queue] ( identifier[queue_name] ) keyword[break] keyword[except] ( identifier[pika] . identifier[exceptions] . identifier[AMQPConnectionError] , identifier[pika] . identifier[exceptions] . identifier[AMQPChannelError] ) keyword[as] identifier[e] : keyword[del] identifier[self] . identifier[channel] keyword[del] identifier[self] . identifier[connection] identifier[attempts] += literal[int] keyword[if] identifier[attempts] > identifier[MAX_DECLARE_ATTEMPTS] : keyword[raise] identifier[ConnectionClosed] ( identifier[e] ) keyword[from] keyword[None] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[attempts] , identifier[MAX_DECLARE_ATTEMPTS] , )
def declare_queue(self, queue_name): """Declare a queue. Has no effect if a queue with the given name already exists. Parameters: queue_name(str): The name of the new queue. Raises: ConnectionClosed: If the underlying channel or connection has been closed. """ attempts = 1 while True: try: if queue_name not in self.queues: self.emit_before('declare_queue', queue_name) self._declare_queue(queue_name) self.queues.add(queue_name) self.emit_after('declare_queue', queue_name) delayed_name = dq_name(queue_name) self._declare_dq_queue(queue_name) self.delay_queues.add(delayed_name) self.emit_after('declare_delay_queue', delayed_name) self._declare_xq_queue(queue_name) # depends on [control=['if'], data=['queue_name']] break # depends on [control=['try'], data=[]] except (pika.exceptions.AMQPConnectionError, pika.exceptions.AMQPChannelError) as e: # pragma: no cover # Delete the channel and the connection so that the next # caller may initiate new ones of each. del self.channel del self.connection attempts += 1 if attempts > MAX_DECLARE_ATTEMPTS: raise ConnectionClosed(e) from None # depends on [control=['if'], data=[]] self.logger.debug('Retrying declare due to closed connection. [%d/%d]', attempts, MAX_DECLARE_ATTEMPTS) # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]]
def api_start(working_dir, host, port, thread=True): """ Start the global API server Returns the API server thread """ api_srv = BlockstackdAPIServer( working_dir, host, port ) log.info("Starting API server on port {}".format(port)) if thread: api_srv.start() return api_srv
def function[api_start, parameter[working_dir, host, port, thread]]: constant[ Start the global API server Returns the API server thread ] variable[api_srv] assign[=] call[name[BlockstackdAPIServer], parameter[name[working_dir], name[host], name[port]]] call[name[log].info, parameter[call[constant[Starting API server on port {}].format, parameter[name[port]]]]] if name[thread] begin[:] call[name[api_srv].start, parameter[]] return[name[api_srv]]
keyword[def] identifier[api_start] ( identifier[working_dir] , identifier[host] , identifier[port] , identifier[thread] = keyword[True] ): literal[string] identifier[api_srv] = identifier[BlockstackdAPIServer] ( identifier[working_dir] , identifier[host] , identifier[port] ) identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[port] )) keyword[if] identifier[thread] : identifier[api_srv] . identifier[start] () keyword[return] identifier[api_srv]
def api_start(working_dir, host, port, thread=True): """ Start the global API server Returns the API server thread """ api_srv = BlockstackdAPIServer(working_dir, host, port) log.info('Starting API server on port {}'.format(port)) if thread: api_srv.start() # depends on [control=['if'], data=[]] return api_srv
def pool_by_id(self, pool_id): """ Method to return object pool by id Param pool_id: pool id Returns object pool """ uri = 'api/v3/pool/%s/' % pool_id return super(ApiPool, self).get(uri)
def function[pool_by_id, parameter[self, pool_id]]: constant[ Method to return object pool by id Param pool_id: pool id Returns object pool ] variable[uri] assign[=] binary_operation[constant[api/v3/pool/%s/] <ast.Mod object at 0x7da2590d6920> name[pool_id]] return[call[call[name[super], parameter[name[ApiPool], name[self]]].get, parameter[name[uri]]]]
keyword[def] identifier[pool_by_id] ( identifier[self] , identifier[pool_id] ): literal[string] identifier[uri] = literal[string] % identifier[pool_id] keyword[return] identifier[super] ( identifier[ApiPool] , identifier[self] ). identifier[get] ( identifier[uri] )
def pool_by_id(self, pool_id): """ Method to return object pool by id Param pool_id: pool id Returns object pool """ uri = 'api/v3/pool/%s/' % pool_id return super(ApiPool, self).get(uri)
def parse_args(sys_argv, usage): """ Return an OptionParser for the script. """ args = sys_argv[1:] parser = OptionParser(usage=usage) options, args = parser.parse_args(args) template, context = args return template, context
def function[parse_args, parameter[sys_argv, usage]]: constant[ Return an OptionParser for the script. ] variable[args] assign[=] call[name[sys_argv]][<ast.Slice object at 0x7da1b26aec80>] variable[parser] assign[=] call[name[OptionParser], parameter[]] <ast.Tuple object at 0x7da1b26ae200> assign[=] call[name[parser].parse_args, parameter[name[args]]] <ast.Tuple object at 0x7da1b26ae380> assign[=] name[args] return[tuple[[<ast.Name object at 0x7da1b26ad7e0>, <ast.Name object at 0x7da1b26ac670>]]]
keyword[def] identifier[parse_args] ( identifier[sys_argv] , identifier[usage] ): literal[string] identifier[args] = identifier[sys_argv] [ literal[int] :] identifier[parser] = identifier[OptionParser] ( identifier[usage] = identifier[usage] ) identifier[options] , identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[args] ) identifier[template] , identifier[context] = identifier[args] keyword[return] identifier[template] , identifier[context]
def parse_args(sys_argv, usage): """ Return an OptionParser for the script. """ args = sys_argv[1:] parser = OptionParser(usage=usage) (options, args) = parser.parse_args(args) (template, context) = args return (template, context)
def filter_users_by_email(email): """Return list of users by email address Typically one, at most just a few in length. First we look through EmailAddress table, than customisable User model table. Add results together avoiding SQL joins and deduplicate. """ from .models import EmailAddress User = get_user_model() mails = EmailAddress.objects.filter(email__iexact=email) users = [e.user for e in mails.prefetch_related('user')] if app_settings.USER_MODEL_EMAIL_FIELD: q_dict = {app_settings.USER_MODEL_EMAIL_FIELD + '__iexact': email} users += list(User.objects.filter(**q_dict)) return list(set(users))
def function[filter_users_by_email, parameter[email]]: constant[Return list of users by email address Typically one, at most just a few in length. First we look through EmailAddress table, than customisable User model table. Add results together avoiding SQL joins and deduplicate. ] from relative_module[models] import module[EmailAddress] variable[User] assign[=] call[name[get_user_model], parameter[]] variable[mails] assign[=] call[name[EmailAddress].objects.filter, parameter[]] variable[users] assign[=] <ast.ListComp object at 0x7da20c794100> if name[app_settings].USER_MODEL_EMAIL_FIELD begin[:] variable[q_dict] assign[=] dictionary[[<ast.BinOp object at 0x7da18f810f10>], [<ast.Name object at 0x7da18f8121a0>]] <ast.AugAssign object at 0x7da18f812b90> return[call[name[list], parameter[call[name[set], parameter[name[users]]]]]]
keyword[def] identifier[filter_users_by_email] ( identifier[email] ): literal[string] keyword[from] . identifier[models] keyword[import] identifier[EmailAddress] identifier[User] = identifier[get_user_model] () identifier[mails] = identifier[EmailAddress] . identifier[objects] . identifier[filter] ( identifier[email__iexact] = identifier[email] ) identifier[users] =[ identifier[e] . identifier[user] keyword[for] identifier[e] keyword[in] identifier[mails] . identifier[prefetch_related] ( literal[string] )] keyword[if] identifier[app_settings] . identifier[USER_MODEL_EMAIL_FIELD] : identifier[q_dict] ={ identifier[app_settings] . identifier[USER_MODEL_EMAIL_FIELD] + literal[string] : identifier[email] } identifier[users] += identifier[list] ( identifier[User] . identifier[objects] . identifier[filter] (** identifier[q_dict] )) keyword[return] identifier[list] ( identifier[set] ( identifier[users] ))
def filter_users_by_email(email): """Return list of users by email address Typically one, at most just a few in length. First we look through EmailAddress table, than customisable User model table. Add results together avoiding SQL joins and deduplicate. """ from .models import EmailAddress User = get_user_model() mails = EmailAddress.objects.filter(email__iexact=email) users = [e.user for e in mails.prefetch_related('user')] if app_settings.USER_MODEL_EMAIL_FIELD: q_dict = {app_settings.USER_MODEL_EMAIL_FIELD + '__iexact': email} users += list(User.objects.filter(**q_dict)) # depends on [control=['if'], data=[]] return list(set(users))
def simplify_scalar(self, func=sympy.simplify): """Simplify all scalar expressions within S, L and H Return a new :class:`SLH` object with the simplified expressions. See also: :meth:`.QuantumExpression.simplify_scalar` """ return SLH( self.S.simplify_scalar(func=func), self.L.simplify_scalar(func=func), self.H.simplify_scalar(func=func))
def function[simplify_scalar, parameter[self, func]]: constant[Simplify all scalar expressions within S, L and H Return a new :class:`SLH` object with the simplified expressions. See also: :meth:`.QuantumExpression.simplify_scalar` ] return[call[name[SLH], parameter[call[name[self].S.simplify_scalar, parameter[]], call[name[self].L.simplify_scalar, parameter[]], call[name[self].H.simplify_scalar, parameter[]]]]]
keyword[def] identifier[simplify_scalar] ( identifier[self] , identifier[func] = identifier[sympy] . identifier[simplify] ): literal[string] keyword[return] identifier[SLH] ( identifier[self] . identifier[S] . identifier[simplify_scalar] ( identifier[func] = identifier[func] ), identifier[self] . identifier[L] . identifier[simplify_scalar] ( identifier[func] = identifier[func] ), identifier[self] . identifier[H] . identifier[simplify_scalar] ( identifier[func] = identifier[func] ))
def simplify_scalar(self, func=sympy.simplify): """Simplify all scalar expressions within S, L and H Return a new :class:`SLH` object with the simplified expressions. See also: :meth:`.QuantumExpression.simplify_scalar` """ return SLH(self.S.simplify_scalar(func=func), self.L.simplify_scalar(func=func), self.H.simplify_scalar(func=func))
def pluginPackagePaths(name): """ Return a list of additional directories which should be searched for modules to be included as part of the named plugin package. @type name: C{str} @param name: The fully-qualified Python name of a plugin package, eg C{'twisted.plugins'}. @rtype: C{list} of C{str} @return: The absolute paths to other directories which may contain plugin modules for the named plugin package. """ package = name.split('.') # Note that this may include directories which do not exist. It may be # preferable to remove such directories at this point, rather than allow # them to be searched later on. # # Note as well that only '__init__.py' will be considered to make a # directory a package (and thus exclude it from this list). This means # that if you create a master plugin package which has some other kind of # __init__ (eg, __init__.pyc) it will be incorrectly treated as a # supplementary plugin directory. return [ os.path.abspath(os.path.join(x, *package)) for x in sys.path if not os.path.exists(os.path.join(x, *package + ['__init__.py']))]
def function[pluginPackagePaths, parameter[name]]: constant[ Return a list of additional directories which should be searched for modules to be included as part of the named plugin package. @type name: C{str} @param name: The fully-qualified Python name of a plugin package, eg C{'twisted.plugins'}. @rtype: C{list} of C{str} @return: The absolute paths to other directories which may contain plugin modules for the named plugin package. ] variable[package] assign[=] call[name[name].split, parameter[constant[.]]] return[<ast.ListComp object at 0x7da20c794520>]
keyword[def] identifier[pluginPackagePaths] ( identifier[name] ): literal[string] identifier[package] = identifier[name] . identifier[split] ( literal[string] ) keyword[return] [ identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[x] ,* identifier[package] )) keyword[for] identifier[x] keyword[in] identifier[sys] . identifier[path] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[x] ,* identifier[package] +[ literal[string] ]))]
def pluginPackagePaths(name): """ Return a list of additional directories which should be searched for modules to be included as part of the named plugin package. @type name: C{str} @param name: The fully-qualified Python name of a plugin package, eg C{'twisted.plugins'}. @rtype: C{list} of C{str} @return: The absolute paths to other directories which may contain plugin modules for the named plugin package. """ package = name.split('.') # Note that this may include directories which do not exist. It may be # preferable to remove such directories at this point, rather than allow # them to be searched later on. # # Note as well that only '__init__.py' will be considered to make a # directory a package (and thus exclude it from this list). This means # that if you create a master plugin package which has some other kind of # __init__ (eg, __init__.pyc) it will be incorrectly treated as a # supplementary plugin directory. return [os.path.abspath(os.path.join(x, *package)) for x in sys.path if not os.path.exists(os.path.join(x, *package + ['__init__.py']))]
def get_short_annotations(annotations): """ Converts full GATK annotation name to the shortened version :param annotations: :return: """ # Annotations need to match VCF header short_name = {'QualByDepth': 'QD', 'FisherStrand': 'FS', 'StrandOddsRatio': 'SOR', 'ReadPosRankSumTest': 'ReadPosRankSum', 'MappingQualityRankSumTest': 'MQRankSum', 'RMSMappingQuality': 'MQ', 'InbreedingCoeff': 'ID'} short_annotations = [] for annotation in annotations: if annotation in short_name: annotation = short_name[annotation] short_annotations.append(annotation) return short_annotations
def function[get_short_annotations, parameter[annotations]]: constant[ Converts full GATK annotation name to the shortened version :param annotations: :return: ] variable[short_name] assign[=] dictionary[[<ast.Constant object at 0x7da18ede40d0>, <ast.Constant object at 0x7da18ede4160>, <ast.Constant object at 0x7da18ede5720>, <ast.Constant object at 0x7da18ede6e60>, <ast.Constant object at 0x7da18ede6710>, <ast.Constant object at 0x7da18ede72b0>, <ast.Constant object at 0x7da18ede4340>], [<ast.Constant object at 0x7da18ede5690>, <ast.Constant object at 0x7da18ede6590>, <ast.Constant object at 0x7da18ede6a70>, <ast.Constant object at 0x7da18ede6e90>, <ast.Constant object at 0x7da18ede7460>, <ast.Constant object at 0x7da18ede60b0>, <ast.Constant object at 0x7da18ede7dc0>]] variable[short_annotations] assign[=] list[[]] for taget[name[annotation]] in starred[name[annotations]] begin[:] if compare[name[annotation] in name[short_name]] begin[:] variable[annotation] assign[=] call[name[short_name]][name[annotation]] call[name[short_annotations].append, parameter[name[annotation]]] return[name[short_annotations]]
keyword[def] identifier[get_short_annotations] ( identifier[annotations] ): literal[string] identifier[short_name] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } identifier[short_annotations] =[] keyword[for] identifier[annotation] keyword[in] identifier[annotations] : keyword[if] identifier[annotation] keyword[in] identifier[short_name] : identifier[annotation] = identifier[short_name] [ identifier[annotation] ] identifier[short_annotations] . identifier[append] ( identifier[annotation] ) keyword[return] identifier[short_annotations]
def get_short_annotations(annotations): """ Converts full GATK annotation name to the shortened version :param annotations: :return: """ # Annotations need to match VCF header short_name = {'QualByDepth': 'QD', 'FisherStrand': 'FS', 'StrandOddsRatio': 'SOR', 'ReadPosRankSumTest': 'ReadPosRankSum', 'MappingQualityRankSumTest': 'MQRankSum', 'RMSMappingQuality': 'MQ', 'InbreedingCoeff': 'ID'} short_annotations = [] for annotation in annotations: if annotation in short_name: annotation = short_name[annotation] # depends on [control=['if'], data=['annotation', 'short_name']] short_annotations.append(annotation) # depends on [control=['for'], data=['annotation']] return short_annotations
def matchmaker_matches(institute_id, case_name): """Show all MatchMaker matches for a given case""" # check that only authorized users can access MME patients matches user_obj = store.user(current_user.email) if 'mme_submitter' not in user_obj['roles']: flash('unauthorized request', 'warning') return redirect(request.referrer) # Required params for getting matches from MME server: mme_base_url = current_app.config.get('MME_URL') mme_token = current_app.config.get('MME_TOKEN') if not mme_base_url or not mme_token: flash('An error occurred reading matchmaker connection parameters. Please check config file!', 'danger') return redirect(request.referrer) institute_obj, case_obj = institute_and_case(store, institute_id, case_name) data = controllers.mme_matches(case_obj, institute_obj, mme_base_url, mme_token) if data and data.get('server_errors'): flash('MatchMaker server returned error:{}'.format(data['server_errors']), 'danger') return redirect(request.referrer) elif not data: data = { 'institute' : institute_obj, 'case' : case_obj } return data
def function[matchmaker_matches, parameter[institute_id, case_name]]: constant[Show all MatchMaker matches for a given case] variable[user_obj] assign[=] call[name[store].user, parameter[name[current_user].email]] if compare[constant[mme_submitter] <ast.NotIn object at 0x7da2590d7190> call[name[user_obj]][constant[roles]]] begin[:] call[name[flash], parameter[constant[unauthorized request], constant[warning]]] return[call[name[redirect], parameter[name[request].referrer]]] variable[mme_base_url] assign[=] call[name[current_app].config.get, parameter[constant[MME_URL]]] variable[mme_token] assign[=] call[name[current_app].config.get, parameter[constant[MME_TOKEN]]] if <ast.BoolOp object at 0x7da20cabf160> begin[:] call[name[flash], parameter[constant[An error occurred reading matchmaker connection parameters. Please check config file!], constant[danger]]] return[call[name[redirect], parameter[name[request].referrer]]] <ast.Tuple object at 0x7da1b23443a0> assign[=] call[name[institute_and_case], parameter[name[store], name[institute_id], name[case_name]]] variable[data] assign[=] call[name[controllers].mme_matches, parameter[name[case_obj], name[institute_obj], name[mme_base_url], name[mme_token]]] if <ast.BoolOp object at 0x7da1b23464d0> begin[:] call[name[flash], parameter[call[constant[MatchMaker server returned error:{}].format, parameter[call[name[data]][constant[server_errors]]]], constant[danger]]] return[call[name[redirect], parameter[name[request].referrer]]] return[name[data]]
keyword[def] identifier[matchmaker_matches] ( identifier[institute_id] , identifier[case_name] ): literal[string] identifier[user_obj] = identifier[store] . identifier[user] ( identifier[current_user] . identifier[email] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[user_obj] [ literal[string] ]: identifier[flash] ( literal[string] , literal[string] ) keyword[return] identifier[redirect] ( identifier[request] . identifier[referrer] ) identifier[mme_base_url] = identifier[current_app] . identifier[config] . identifier[get] ( literal[string] ) identifier[mme_token] = identifier[current_app] . identifier[config] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[mme_base_url] keyword[or] keyword[not] identifier[mme_token] : identifier[flash] ( literal[string] , literal[string] ) keyword[return] identifier[redirect] ( identifier[request] . identifier[referrer] ) identifier[institute_obj] , identifier[case_obj] = identifier[institute_and_case] ( identifier[store] , identifier[institute_id] , identifier[case_name] ) identifier[data] = identifier[controllers] . identifier[mme_matches] ( identifier[case_obj] , identifier[institute_obj] , identifier[mme_base_url] , identifier[mme_token] ) keyword[if] identifier[data] keyword[and] identifier[data] . identifier[get] ( literal[string] ): identifier[flash] ( literal[string] . identifier[format] ( identifier[data] [ literal[string] ]), literal[string] ) keyword[return] identifier[redirect] ( identifier[request] . identifier[referrer] ) keyword[elif] keyword[not] identifier[data] : identifier[data] ={ literal[string] : identifier[institute_obj] , literal[string] : identifier[case_obj] } keyword[return] identifier[data]
def matchmaker_matches(institute_id, case_name): """Show all MatchMaker matches for a given case""" # check that only authorized users can access MME patients matches user_obj = store.user(current_user.email) if 'mme_submitter' not in user_obj['roles']: flash('unauthorized request', 'warning') return redirect(request.referrer) # depends on [control=['if'], data=[]] # Required params for getting matches from MME server: mme_base_url = current_app.config.get('MME_URL') mme_token = current_app.config.get('MME_TOKEN') if not mme_base_url or not mme_token: flash('An error occurred reading matchmaker connection parameters. Please check config file!', 'danger') return redirect(request.referrer) # depends on [control=['if'], data=[]] (institute_obj, case_obj) = institute_and_case(store, institute_id, case_name) data = controllers.mme_matches(case_obj, institute_obj, mme_base_url, mme_token) if data and data.get('server_errors'): flash('MatchMaker server returned error:{}'.format(data['server_errors']), 'danger') return redirect(request.referrer) # depends on [control=['if'], data=[]] elif not data: data = {'institute': institute_obj, 'case': case_obj} # depends on [control=['if'], data=[]] return data
def verify(certificate, jar_file, sf_name=None): """ Verifies signature of a JAR file. Limitations: - diagnostic is less verbose than of jarsigner :return None if verification succeeds. :exception SignatureBlockFileVerificationError, ManifestChecksumError, JarChecksumError, JarSignatureMissingError Reference: http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html#Signature_Validation Note that the validation is done in three steps. Failure at any step is a failure of the whole validation. """ # noqua # Step 0: get the "key alias", used also for naming of sig-related files. zip_file = ZipFile(jar_file) sf_files = [f for f in zip_file.namelist() if file_matches_sigfile(f)] if len(sf_files) == 0: raise JarSignatureMissingError("No .SF file in %s" % jar_file) elif len(sf_files) > 1: if sf_name is None: msg = "Multiple .SF files in %s, but SF_NAME.SF not specified" \ % jar_file raise VerificationError(msg) elif ('META-INF/' + sf_name) in sf_files: sf_filename = 'META-INF/' + sf_name else: msg = "No .SF file in %s named META-INF/%s (found %d .SF files)" \ % (jar_file, sf_name, len(sf_files)) raise VerificationError(msg) elif len(sf_files) == 1: if sf_name is None: sf_filename = sf_files[0] elif sf_files[0] == 'META-INF/' + sf_name: sf_filename = sf_files[0] else: msg = "No .SF file in %s named META-INF/%s" % (jar_file, sf_name) raise VerificationError(msg) key_alias = sf_filename[9:-3] # "META-INF/%s.SF" sf_data = zip_file.read(sf_filename) # Step 1: check the crypto part. file_list = zip_file.namelist() sig_block_filename = None # JAR specification mentions only RSA and DSA; jarsigner also has EC # TODO: what about "SIG-*"? signature_extensions = ("RSA", "DSA", "EC") for extension in signature_extensions: candidate_filename = "META-INF/%s.%s" % (key_alias, extension) if candidate_filename in file_list: sig_block_filename = candidate_filename break if sig_block_filename is None: msg = "None of %s found in JAR" % \ ", ".join(key_alias + "." + x for x in signature_extensions) raise JarSignatureMissingError(msg) sig_block_data = zip_file.read(sig_block_filename) try: verify_signature_block(certificate, sf_data, sig_block_data) except SignatureBlockVerificationError as message: message = "Signature block verification failed: %s" % message raise SignatureBlockFileVerificationError(message) # KEYALIAS.SF is correctly signed. # Step 2: Check that it contains correct checksum of the manifest. signature_manifest = SignatureManifest() signature_manifest.parse(sf_data) jar_manifest = Manifest() jar_manifest.load_from_jar(jar_file) errors = signature_manifest.verify_manifest(jar_manifest) if len(errors) > 0: msg = "%s: in .SF file, section checksum(s) failed for: %s" \ % (jar_file, ",".join(errors)) raise ManifestChecksumError(msg) # Checksums of MANIFEST.MF itself are correct. # Step 3: Check that it contains valid checksums for each file # from the JAR. NOTE: the check is done for JAR entries. If some # JAR entries are deleted after signing, the verification still # succeeds. This seems to not follow the reference specification, # but that's what jarsigner does. errors = jar_manifest.verify_jar_checksums(jar_file) if len(errors) > 0: msg = "Checksum(s) for jar entries of jar file %s failed for: %s" \ % (jar_file, ",".join(errors)) raise JarChecksumError(msg) return None
def function[verify, parameter[certificate, jar_file, sf_name]]: constant[ Verifies signature of a JAR file. Limitations: - diagnostic is less verbose than of jarsigner :return None if verification succeeds. :exception SignatureBlockFileVerificationError, ManifestChecksumError, JarChecksumError, JarSignatureMissingError Reference: http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html#Signature_Validation Note that the validation is done in three steps. Failure at any step is a failure of the whole validation. ] variable[zip_file] assign[=] call[name[ZipFile], parameter[name[jar_file]]] variable[sf_files] assign[=] <ast.ListComp object at 0x7da1b0b061d0> if compare[call[name[len], parameter[name[sf_files]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b0b06770> variable[key_alias] assign[=] call[name[sf_filename]][<ast.Slice object at 0x7da1b0b05d80>] variable[sf_data] assign[=] call[name[zip_file].read, parameter[name[sf_filename]]] variable[file_list] assign[=] call[name[zip_file].namelist, parameter[]] variable[sig_block_filename] assign[=] constant[None] variable[signature_extensions] assign[=] tuple[[<ast.Constant object at 0x7da1b0b063e0>, <ast.Constant object at 0x7da1b0b06560>, <ast.Constant object at 0x7da1b0b06410>]] for taget[name[extension]] in starred[name[signature_extensions]] begin[:] variable[candidate_filename] assign[=] binary_operation[constant[META-INF/%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0b07a30>, <ast.Name object at 0x7da1b0b07a00>]]] if compare[name[candidate_filename] in name[file_list]] begin[:] variable[sig_block_filename] assign[=] name[candidate_filename] break if compare[name[sig_block_filename] is constant[None]] begin[:] variable[msg] assign[=] binary_operation[constant[None of %s found in JAR] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[<ast.GeneratorExp object at 0x7da1b0b04b20>]]] <ast.Raise object at 0x7da1b0b04d00> variable[sig_block_data] assign[=] call[name[zip_file].read, parameter[name[sig_block_filename]]] <ast.Try object at 0x7da1b0b04ee0> variable[signature_manifest] assign[=] call[name[SignatureManifest], parameter[]] call[name[signature_manifest].parse, parameter[name[sf_data]]] variable[jar_manifest] assign[=] call[name[Manifest], parameter[]] call[name[jar_manifest].load_from_jar, parameter[name[jar_file]]] variable[errors] assign[=] call[name[signature_manifest].verify_manifest, parameter[name[jar_manifest]]] if compare[call[name[len], parameter[name[errors]]] greater[>] constant[0]] begin[:] variable[msg] assign[=] binary_operation[constant[%s: in .SF file, section checksum(s) failed for: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0b073a0>, <ast.Call object at 0x7da1b0b07370>]]] <ast.Raise object at 0x7da1b0b06c50> variable[errors] assign[=] call[name[jar_manifest].verify_jar_checksums, parameter[name[jar_file]]] if compare[call[name[len], parameter[name[errors]]] greater[>] constant[0]] begin[:] variable[msg] assign[=] binary_operation[constant[Checksum(s) for jar entries of jar file %s failed for: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0b1a920>, <ast.Call object at 0x7da1b0b180a0>]]] <ast.Raise object at 0x7da1b0b1afb0> return[constant[None]]
keyword[def] identifier[verify] ( identifier[certificate] , identifier[jar_file] , identifier[sf_name] = keyword[None] ): literal[string] identifier[zip_file] = identifier[ZipFile] ( identifier[jar_file] ) identifier[sf_files] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[zip_file] . identifier[namelist] () keyword[if] identifier[file_matches_sigfile] ( identifier[f] )] keyword[if] identifier[len] ( identifier[sf_files] )== literal[int] : keyword[raise] identifier[JarSignatureMissingError] ( literal[string] % identifier[jar_file] ) keyword[elif] identifier[len] ( identifier[sf_files] )> literal[int] : keyword[if] identifier[sf_name] keyword[is] keyword[None] : identifier[msg] = literal[string] % identifier[jar_file] keyword[raise] identifier[VerificationError] ( identifier[msg] ) keyword[elif] ( literal[string] + identifier[sf_name] ) keyword[in] identifier[sf_files] : identifier[sf_filename] = literal[string] + identifier[sf_name] keyword[else] : identifier[msg] = literal[string] %( identifier[jar_file] , identifier[sf_name] , identifier[len] ( identifier[sf_files] )) keyword[raise] identifier[VerificationError] ( identifier[msg] ) keyword[elif] identifier[len] ( identifier[sf_files] )== literal[int] : keyword[if] identifier[sf_name] keyword[is] keyword[None] : identifier[sf_filename] = identifier[sf_files] [ literal[int] ] keyword[elif] identifier[sf_files] [ literal[int] ]== literal[string] + identifier[sf_name] : identifier[sf_filename] = identifier[sf_files] [ literal[int] ] keyword[else] : identifier[msg] = literal[string] %( identifier[jar_file] , identifier[sf_name] ) keyword[raise] identifier[VerificationError] ( identifier[msg] ) identifier[key_alias] = identifier[sf_filename] [ literal[int] :- literal[int] ] identifier[sf_data] = identifier[zip_file] . identifier[read] ( identifier[sf_filename] ) identifier[file_list] = identifier[zip_file] . identifier[namelist] () identifier[sig_block_filename] = keyword[None] identifier[signature_extensions] =( literal[string] , literal[string] , literal[string] ) keyword[for] identifier[extension] keyword[in] identifier[signature_extensions] : identifier[candidate_filename] = literal[string] %( identifier[key_alias] , identifier[extension] ) keyword[if] identifier[candidate_filename] keyword[in] identifier[file_list] : identifier[sig_block_filename] = identifier[candidate_filename] keyword[break] keyword[if] identifier[sig_block_filename] keyword[is] keyword[None] : identifier[msg] = literal[string] % literal[string] . identifier[join] ( identifier[key_alias] + literal[string] + identifier[x] keyword[for] identifier[x] keyword[in] identifier[signature_extensions] ) keyword[raise] identifier[JarSignatureMissingError] ( identifier[msg] ) identifier[sig_block_data] = identifier[zip_file] . identifier[read] ( identifier[sig_block_filename] ) keyword[try] : identifier[verify_signature_block] ( identifier[certificate] , identifier[sf_data] , identifier[sig_block_data] ) keyword[except] identifier[SignatureBlockVerificationError] keyword[as] identifier[message] : identifier[message] = literal[string] % identifier[message] keyword[raise] identifier[SignatureBlockFileVerificationError] ( identifier[message] ) identifier[signature_manifest] = identifier[SignatureManifest] () identifier[signature_manifest] . identifier[parse] ( identifier[sf_data] ) identifier[jar_manifest] = identifier[Manifest] () identifier[jar_manifest] . identifier[load_from_jar] ( identifier[jar_file] ) identifier[errors] = identifier[signature_manifest] . identifier[verify_manifest] ( identifier[jar_manifest] ) keyword[if] identifier[len] ( identifier[errors] )> literal[int] : identifier[msg] = literal[string] %( identifier[jar_file] , literal[string] . identifier[join] ( identifier[errors] )) keyword[raise] identifier[ManifestChecksumError] ( identifier[msg] ) identifier[errors] = identifier[jar_manifest] . identifier[verify_jar_checksums] ( identifier[jar_file] ) keyword[if] identifier[len] ( identifier[errors] )> literal[int] : identifier[msg] = literal[string] %( identifier[jar_file] , literal[string] . identifier[join] ( identifier[errors] )) keyword[raise] identifier[JarChecksumError] ( identifier[msg] ) keyword[return] keyword[None]
def verify(certificate, jar_file, sf_name=None): """ Verifies signature of a JAR file. Limitations: - diagnostic is less verbose than of jarsigner :return None if verification succeeds. :exception SignatureBlockFileVerificationError, ManifestChecksumError, JarChecksumError, JarSignatureMissingError Reference: http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html#Signature_Validation Note that the validation is done in three steps. Failure at any step is a failure of the whole validation. """ # noqua # Step 0: get the "key alias", used also for naming of sig-related files. zip_file = ZipFile(jar_file) sf_files = [f for f in zip_file.namelist() if file_matches_sigfile(f)] if len(sf_files) == 0: raise JarSignatureMissingError('No .SF file in %s' % jar_file) # depends on [control=['if'], data=[]] elif len(sf_files) > 1: if sf_name is None: msg = 'Multiple .SF files in %s, but SF_NAME.SF not specified' % jar_file raise VerificationError(msg) # depends on [control=['if'], data=[]] elif 'META-INF/' + sf_name in sf_files: sf_filename = 'META-INF/' + sf_name # depends on [control=['if'], data=[]] else: msg = 'No .SF file in %s named META-INF/%s (found %d .SF files)' % (jar_file, sf_name, len(sf_files)) raise VerificationError(msg) # depends on [control=['if'], data=[]] elif len(sf_files) == 1: if sf_name is None: sf_filename = sf_files[0] # depends on [control=['if'], data=[]] elif sf_files[0] == 'META-INF/' + sf_name: sf_filename = sf_files[0] # depends on [control=['if'], data=[]] else: msg = 'No .SF file in %s named META-INF/%s' % (jar_file, sf_name) raise VerificationError(msg) # depends on [control=['if'], data=[]] key_alias = sf_filename[9:-3] # "META-INF/%s.SF" sf_data = zip_file.read(sf_filename) # Step 1: check the crypto part. file_list = zip_file.namelist() sig_block_filename = None # JAR specification mentions only RSA and DSA; jarsigner also has EC # TODO: what about "SIG-*"? signature_extensions = ('RSA', 'DSA', 'EC') for extension in signature_extensions: candidate_filename = 'META-INF/%s.%s' % (key_alias, extension) if candidate_filename in file_list: sig_block_filename = candidate_filename break # depends on [control=['if'], data=['candidate_filename']] # depends on [control=['for'], data=['extension']] if sig_block_filename is None: msg = 'None of %s found in JAR' % ', '.join((key_alias + '.' + x for x in signature_extensions)) raise JarSignatureMissingError(msg) # depends on [control=['if'], data=[]] sig_block_data = zip_file.read(sig_block_filename) try: verify_signature_block(certificate, sf_data, sig_block_data) # depends on [control=['try'], data=[]] except SignatureBlockVerificationError as message: message = 'Signature block verification failed: %s' % message raise SignatureBlockFileVerificationError(message) # depends on [control=['except'], data=['message']] # KEYALIAS.SF is correctly signed. # Step 2: Check that it contains correct checksum of the manifest. signature_manifest = SignatureManifest() signature_manifest.parse(sf_data) jar_manifest = Manifest() jar_manifest.load_from_jar(jar_file) errors = signature_manifest.verify_manifest(jar_manifest) if len(errors) > 0: msg = '%s: in .SF file, section checksum(s) failed for: %s' % (jar_file, ','.join(errors)) raise ManifestChecksumError(msg) # depends on [control=['if'], data=[]] # Checksums of MANIFEST.MF itself are correct. # Step 3: Check that it contains valid checksums for each file # from the JAR. NOTE: the check is done for JAR entries. If some # JAR entries are deleted after signing, the verification still # succeeds. This seems to not follow the reference specification, # but that's what jarsigner does. errors = jar_manifest.verify_jar_checksums(jar_file) if len(errors) > 0: msg = 'Checksum(s) for jar entries of jar file %s failed for: %s' % (jar_file, ','.join(errors)) raise JarChecksumError(msg) # depends on [control=['if'], data=[]] return None
def create_many(self, statements): """ Creates multiple statement entries. """ create_statements = [] for statement in statements: statement_data = statement.serialize() tag_data = list(set(statement_data.pop('tags', []))) statement_data['tags'] = tag_data if not statement.search_text: statement_data['search_text'] = self.tagger.get_bigram_pair_string(statement.text) if not statement.search_in_response_to and statement.in_response_to: statement_data['search_in_response_to'] = self.tagger.get_bigram_pair_string(statement.in_response_to) create_statements.append(statement_data) self.statements.insert_many(create_statements)
def function[create_many, parameter[self, statements]]: constant[ Creates multiple statement entries. ] variable[create_statements] assign[=] list[[]] for taget[name[statement]] in starred[name[statements]] begin[:] variable[statement_data] assign[=] call[name[statement].serialize, parameter[]] variable[tag_data] assign[=] call[name[list], parameter[call[name[set], parameter[call[name[statement_data].pop, parameter[constant[tags], list[[]]]]]]]] call[name[statement_data]][constant[tags]] assign[=] name[tag_data] if <ast.UnaryOp object at 0x7da1b1eb9900> begin[:] call[name[statement_data]][constant[search_text]] assign[=] call[name[self].tagger.get_bigram_pair_string, parameter[name[statement].text]] if <ast.BoolOp object at 0x7da1b1ff1720> begin[:] call[name[statement_data]][constant[search_in_response_to]] assign[=] call[name[self].tagger.get_bigram_pair_string, parameter[name[statement].in_response_to]] call[name[create_statements].append, parameter[name[statement_data]]] call[name[self].statements.insert_many, parameter[name[create_statements]]]
keyword[def] identifier[create_many] ( identifier[self] , identifier[statements] ): literal[string] identifier[create_statements] =[] keyword[for] identifier[statement] keyword[in] identifier[statements] : identifier[statement_data] = identifier[statement] . identifier[serialize] () identifier[tag_data] = identifier[list] ( identifier[set] ( identifier[statement_data] . identifier[pop] ( literal[string] ,[]))) identifier[statement_data] [ literal[string] ]= identifier[tag_data] keyword[if] keyword[not] identifier[statement] . identifier[search_text] : identifier[statement_data] [ literal[string] ]= identifier[self] . identifier[tagger] . identifier[get_bigram_pair_string] ( identifier[statement] . identifier[text] ) keyword[if] keyword[not] identifier[statement] . identifier[search_in_response_to] keyword[and] identifier[statement] . identifier[in_response_to] : identifier[statement_data] [ literal[string] ]= identifier[self] . identifier[tagger] . identifier[get_bigram_pair_string] ( identifier[statement] . identifier[in_response_to] ) identifier[create_statements] . identifier[append] ( identifier[statement_data] ) identifier[self] . identifier[statements] . identifier[insert_many] ( identifier[create_statements] )
def create_many(self, statements): """ Creates multiple statement entries. """ create_statements = [] for statement in statements: statement_data = statement.serialize() tag_data = list(set(statement_data.pop('tags', []))) statement_data['tags'] = tag_data if not statement.search_text: statement_data['search_text'] = self.tagger.get_bigram_pair_string(statement.text) # depends on [control=['if'], data=[]] if not statement.search_in_response_to and statement.in_response_to: statement_data['search_in_response_to'] = self.tagger.get_bigram_pair_string(statement.in_response_to) # depends on [control=['if'], data=[]] create_statements.append(statement_data) # depends on [control=['for'], data=['statement']] self.statements.insert_many(create_statements)
def update(cls, request_response_id, monetary_account_id=None, amount_responded=None, status=None, address_shipping=None, address_billing=None, custom_headers=None): """ Update the status to accept or reject the RequestResponse. :type user_id: int :type monetary_account_id: int :type request_response_id: int :param amount_responded: The Amount the user decides to pay. :type amount_responded: object_.Amount :param status: The responding status of the RequestResponse. Can be ACCEPTED or REJECTED. :type status: str :param address_shipping: The shipping Address to return to the user who created the RequestInquiry. Should only be provided if 'require_address' is set to SHIPPING, BILLING_SHIPPING or OPTIONAL. :type address_shipping: object_.Address :param address_billing: The billing Address to return to the user who created the RequestInquiry. Should only be provided if 'require_address' is set to BILLING, BILLING_SHIPPING or OPTIONAL. :type address_billing: object_.Address :type custom_headers: dict[str, str]|None :rtype: BunqResponseRequestResponse """ if custom_headers is None: custom_headers = {} api_client = client.ApiClient(cls._get_api_context()) request_map = { cls.FIELD_AMOUNT_RESPONDED: amount_responded, cls.FIELD_STATUS: status, cls.FIELD_ADDRESS_SHIPPING: address_shipping, cls.FIELD_ADDRESS_BILLING: address_billing } request_map_string = converter.class_to_json(request_map) request_map_string = cls._remove_field_for_request(request_map_string) request_bytes = request_map_string.encode() endpoint_url = cls._ENDPOINT_URL_UPDATE.format(cls._determine_user_id(), cls._determine_monetary_account_id( monetary_account_id), request_response_id) response_raw = api_client.put(endpoint_url, request_bytes, custom_headers) return BunqResponseRequestResponse.cast_from_bunq_response( cls._from_json(response_raw, cls._OBJECT_TYPE_PUT) )
def function[update, parameter[cls, request_response_id, monetary_account_id, amount_responded, status, address_shipping, address_billing, custom_headers]]: constant[ Update the status to accept or reject the RequestResponse. :type user_id: int :type monetary_account_id: int :type request_response_id: int :param amount_responded: The Amount the user decides to pay. :type amount_responded: object_.Amount :param status: The responding status of the RequestResponse. Can be ACCEPTED or REJECTED. :type status: str :param address_shipping: The shipping Address to return to the user who created the RequestInquiry. Should only be provided if 'require_address' is set to SHIPPING, BILLING_SHIPPING or OPTIONAL. :type address_shipping: object_.Address :param address_billing: The billing Address to return to the user who created the RequestInquiry. Should only be provided if 'require_address' is set to BILLING, BILLING_SHIPPING or OPTIONAL. :type address_billing: object_.Address :type custom_headers: dict[str, str]|None :rtype: BunqResponseRequestResponse ] if compare[name[custom_headers] is constant[None]] begin[:] variable[custom_headers] assign[=] dictionary[[], []] variable[api_client] assign[=] call[name[client].ApiClient, parameter[call[name[cls]._get_api_context, parameter[]]]] variable[request_map] assign[=] dictionary[[<ast.Attribute object at 0x7da1b08470d0>, <ast.Attribute object at 0x7da1b0846aa0>, <ast.Attribute object at 0x7da1b0844d30>, <ast.Attribute object at 0x7da1b08471f0>], [<ast.Name object at 0x7da1b0844af0>, <ast.Name object at 0x7da1b0846e30>, <ast.Name object at 0x7da1b08452d0>, <ast.Name object at 0x7da1b08451b0>]] variable[request_map_string] assign[=] call[name[converter].class_to_json, parameter[name[request_map]]] variable[request_map_string] assign[=] call[name[cls]._remove_field_for_request, parameter[name[request_map_string]]] variable[request_bytes] assign[=] call[name[request_map_string].encode, parameter[]] variable[endpoint_url] assign[=] call[name[cls]._ENDPOINT_URL_UPDATE.format, parameter[call[name[cls]._determine_user_id, parameter[]], call[name[cls]._determine_monetary_account_id, parameter[name[monetary_account_id]]], name[request_response_id]]] variable[response_raw] assign[=] call[name[api_client].put, parameter[name[endpoint_url], name[request_bytes], name[custom_headers]]] return[call[name[BunqResponseRequestResponse].cast_from_bunq_response, parameter[call[name[cls]._from_json, parameter[name[response_raw], name[cls]._OBJECT_TYPE_PUT]]]]]
keyword[def] identifier[update] ( identifier[cls] , identifier[request_response_id] , identifier[monetary_account_id] = keyword[None] , identifier[amount_responded] = keyword[None] , identifier[status] = keyword[None] , identifier[address_shipping] = keyword[None] , identifier[address_billing] = keyword[None] , identifier[custom_headers] = keyword[None] ): literal[string] keyword[if] identifier[custom_headers] keyword[is] keyword[None] : identifier[custom_headers] ={} identifier[api_client] = identifier[client] . identifier[ApiClient] ( identifier[cls] . identifier[_get_api_context] ()) identifier[request_map] ={ identifier[cls] . identifier[FIELD_AMOUNT_RESPONDED] : identifier[amount_responded] , identifier[cls] . identifier[FIELD_STATUS] : identifier[status] , identifier[cls] . identifier[FIELD_ADDRESS_SHIPPING] : identifier[address_shipping] , identifier[cls] . identifier[FIELD_ADDRESS_BILLING] : identifier[address_billing] } identifier[request_map_string] = identifier[converter] . identifier[class_to_json] ( identifier[request_map] ) identifier[request_map_string] = identifier[cls] . identifier[_remove_field_for_request] ( identifier[request_map_string] ) identifier[request_bytes] = identifier[request_map_string] . identifier[encode] () identifier[endpoint_url] = identifier[cls] . identifier[_ENDPOINT_URL_UPDATE] . identifier[format] ( identifier[cls] . identifier[_determine_user_id] (), identifier[cls] . identifier[_determine_monetary_account_id] ( identifier[monetary_account_id] ), identifier[request_response_id] ) identifier[response_raw] = identifier[api_client] . identifier[put] ( identifier[endpoint_url] , identifier[request_bytes] , identifier[custom_headers] ) keyword[return] identifier[BunqResponseRequestResponse] . identifier[cast_from_bunq_response] ( identifier[cls] . identifier[_from_json] ( identifier[response_raw] , identifier[cls] . identifier[_OBJECT_TYPE_PUT] ) )
def update(cls, request_response_id, monetary_account_id=None, amount_responded=None, status=None, address_shipping=None, address_billing=None, custom_headers=None): """ Update the status to accept or reject the RequestResponse. :type user_id: int :type monetary_account_id: int :type request_response_id: int :param amount_responded: The Amount the user decides to pay. :type amount_responded: object_.Amount :param status: The responding status of the RequestResponse. Can be ACCEPTED or REJECTED. :type status: str :param address_shipping: The shipping Address to return to the user who created the RequestInquiry. Should only be provided if 'require_address' is set to SHIPPING, BILLING_SHIPPING or OPTIONAL. :type address_shipping: object_.Address :param address_billing: The billing Address to return to the user who created the RequestInquiry. Should only be provided if 'require_address' is set to BILLING, BILLING_SHIPPING or OPTIONAL. :type address_billing: object_.Address :type custom_headers: dict[str, str]|None :rtype: BunqResponseRequestResponse """ if custom_headers is None: custom_headers = {} # depends on [control=['if'], data=['custom_headers']] api_client = client.ApiClient(cls._get_api_context()) request_map = {cls.FIELD_AMOUNT_RESPONDED: amount_responded, cls.FIELD_STATUS: status, cls.FIELD_ADDRESS_SHIPPING: address_shipping, cls.FIELD_ADDRESS_BILLING: address_billing} request_map_string = converter.class_to_json(request_map) request_map_string = cls._remove_field_for_request(request_map_string) request_bytes = request_map_string.encode() endpoint_url = cls._ENDPOINT_URL_UPDATE.format(cls._determine_user_id(), cls._determine_monetary_account_id(monetary_account_id), request_response_id) response_raw = api_client.put(endpoint_url, request_bytes, custom_headers) return BunqResponseRequestResponse.cast_from_bunq_response(cls._from_json(response_raw, cls._OBJECT_TYPE_PUT))
def to_tess(obj): ''' to_tess(obj) yields a Tesselation object that is equivalent to obj; if obj is a tesselation object already and no changes are requested (see options) then obj is returned unmolested. The following objects can be converted into tesselations: * a tesselation object * a mesh or topology object (yields their tess objects) * a 3 x n or n x 3 matrix of integers (the faces) * a tuple of coordinates and faces that can be passed to to_mesh ''' if is_tess(obj): return obj elif is_mesh(obj): return obj.tess elif is_topo(obj): return obj.tess else: # couple things to try: (1) might specify a tess face matrix, (2) might be a mesh-like obj try: return tess(obj) except Exception: pass try: return to_mesh(obj).tess except Exception: pass raise ValueError('Could not convert argument to tesselation object')
def function[to_tess, parameter[obj]]: constant[ to_tess(obj) yields a Tesselation object that is equivalent to obj; if obj is a tesselation object already and no changes are requested (see options) then obj is returned unmolested. The following objects can be converted into tesselations: * a tesselation object * a mesh or topology object (yields their tess objects) * a 3 x n or n x 3 matrix of integers (the faces) * a tuple of coordinates and faces that can be passed to to_mesh ] if call[name[is_tess], parameter[name[obj]]] begin[:] return[name[obj]] <ast.Raise object at 0x7da20c6c4d60>
keyword[def] identifier[to_tess] ( identifier[obj] ): literal[string] keyword[if] identifier[is_tess] ( identifier[obj] ): keyword[return] identifier[obj] keyword[elif] identifier[is_mesh] ( identifier[obj] ): keyword[return] identifier[obj] . identifier[tess] keyword[elif] identifier[is_topo] ( identifier[obj] ): keyword[return] identifier[obj] . identifier[tess] keyword[else] : keyword[try] : keyword[return] identifier[tess] ( identifier[obj] ) keyword[except] identifier[Exception] : keyword[pass] keyword[try] : keyword[return] identifier[to_mesh] ( identifier[obj] ). identifier[tess] keyword[except] identifier[Exception] : keyword[pass] keyword[raise] identifier[ValueError] ( literal[string] )
def to_tess(obj): """ to_tess(obj) yields a Tesselation object that is equivalent to obj; if obj is a tesselation object already and no changes are requested (see options) then obj is returned unmolested. The following objects can be converted into tesselations: * a tesselation object * a mesh or topology object (yields their tess objects) * a 3 x n or n x 3 matrix of integers (the faces) * a tuple of coordinates and faces that can be passed to to_mesh """ if is_tess(obj): return obj # depends on [control=['if'], data=[]] elif is_mesh(obj): return obj.tess # depends on [control=['if'], data=[]] elif is_topo(obj): return obj.tess # depends on [control=['if'], data=[]] else: # couple things to try: (1) might specify a tess face matrix, (2) might be a mesh-like obj try: return tess(obj) # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] try: return to_mesh(obj).tess # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] raise ValueError('Could not convert argument to tesselation object')
def _term(self, term): """Add a term to the query. Arguments: term (str): The term to add. Returns: SearchHelper: Self """ # All terms must be strings for Elasticsearch term = str(term) if term: self.__query["q"] += term return self
def function[_term, parameter[self, term]]: constant[Add a term to the query. Arguments: term (str): The term to add. Returns: SearchHelper: Self ] variable[term] assign[=] call[name[str], parameter[name[term]]] if name[term] begin[:] <ast.AugAssign object at 0x7da1b2348880> return[name[self]]
keyword[def] identifier[_term] ( identifier[self] , identifier[term] ): literal[string] identifier[term] = identifier[str] ( identifier[term] ) keyword[if] identifier[term] : identifier[self] . identifier[__query] [ literal[string] ]+= identifier[term] keyword[return] identifier[self]
def _term(self, term): """Add a term to the query. Arguments: term (str): The term to add. Returns: SearchHelper: Self """ # All terms must be strings for Elasticsearch term = str(term) if term: self.__query['q'] += term # depends on [control=['if'], data=[]] return self
def elem_wrap(self, tree, debug=False, root_id=None): """takes a DGParentedTree and puts a nucleus or satellite on top, depending on the nuclearity of the root element of the tree. """ if root_id is None: root_id = tree.root_id elem = self.elem_dict[root_id] if elem['nuclearity'] == 'nucleus': return n_wrap(tree, debug=debug, root_id=root_id) else: return s_wrap(tree, debug=debug, root_id=root_id)
def function[elem_wrap, parameter[self, tree, debug, root_id]]: constant[takes a DGParentedTree and puts a nucleus or satellite on top, depending on the nuclearity of the root element of the tree. ] if compare[name[root_id] is constant[None]] begin[:] variable[root_id] assign[=] name[tree].root_id variable[elem] assign[=] call[name[self].elem_dict][name[root_id]] if compare[call[name[elem]][constant[nuclearity]] equal[==] constant[nucleus]] begin[:] return[call[name[n_wrap], parameter[name[tree]]]]
keyword[def] identifier[elem_wrap] ( identifier[self] , identifier[tree] , identifier[debug] = keyword[False] , identifier[root_id] = keyword[None] ): literal[string] keyword[if] identifier[root_id] keyword[is] keyword[None] : identifier[root_id] = identifier[tree] . identifier[root_id] identifier[elem] = identifier[self] . identifier[elem_dict] [ identifier[root_id] ] keyword[if] identifier[elem] [ literal[string] ]== literal[string] : keyword[return] identifier[n_wrap] ( identifier[tree] , identifier[debug] = identifier[debug] , identifier[root_id] = identifier[root_id] ) keyword[else] : keyword[return] identifier[s_wrap] ( identifier[tree] , identifier[debug] = identifier[debug] , identifier[root_id] = identifier[root_id] )
def elem_wrap(self, tree, debug=False, root_id=None): """takes a DGParentedTree and puts a nucleus or satellite on top, depending on the nuclearity of the root element of the tree. """ if root_id is None: root_id = tree.root_id # depends on [control=['if'], data=['root_id']] elem = self.elem_dict[root_id] if elem['nuclearity'] == 'nucleus': return n_wrap(tree, debug=debug, root_id=root_id) # depends on [control=['if'], data=[]] else: return s_wrap(tree, debug=debug, root_id=root_id)
async def add(ctx, left: int, right: int): """Adds two numbers together.""" await ctx.send(left + right)
<ast.AsyncFunctionDef object at 0x7da1b1ea21d0>
keyword[async] keyword[def] identifier[add] ( identifier[ctx] , identifier[left] : identifier[int] , identifier[right] : identifier[int] ): literal[string] keyword[await] identifier[ctx] . identifier[send] ( identifier[left] + identifier[right] )
async def add(ctx, left: int, right: int): """Adds two numbers together.""" await ctx.send(left + right)
def stringify_col(df, col_name): """ Take a dataframe and string-i-fy a column of values. Turn nan/None into "" and all other values into strings. Parameters ---------- df : dataframe col_name : string """ df = df.copy() df[col_name] = df[col_name].fillna("") df[col_name] = df[col_name].astype(str) return df
def function[stringify_col, parameter[df, col_name]]: constant[ Take a dataframe and string-i-fy a column of values. Turn nan/None into "" and all other values into strings. Parameters ---------- df : dataframe col_name : string ] variable[df] assign[=] call[name[df].copy, parameter[]] call[name[df]][name[col_name]] assign[=] call[call[name[df]][name[col_name]].fillna, parameter[constant[]]] call[name[df]][name[col_name]] assign[=] call[call[name[df]][name[col_name]].astype, parameter[name[str]]] return[name[df]]
keyword[def] identifier[stringify_col] ( identifier[df] , identifier[col_name] ): literal[string] identifier[df] = identifier[df] . identifier[copy] () identifier[df] [ identifier[col_name] ]= identifier[df] [ identifier[col_name] ]. identifier[fillna] ( literal[string] ) identifier[df] [ identifier[col_name] ]= identifier[df] [ identifier[col_name] ]. identifier[astype] ( identifier[str] ) keyword[return] identifier[df]
def stringify_col(df, col_name): """ Take a dataframe and string-i-fy a column of values. Turn nan/None into "" and all other values into strings. Parameters ---------- df : dataframe col_name : string """ df = df.copy() df[col_name] = df[col_name].fillna('') df[col_name] = df[col_name].astype(str) return df
def setup_client_rpc(self): """Setup RPC client for dfa agent.""" # Setup RPC client. self.clnt = rpc.DfaRpcClient(self._url, constants.DFA_SERVER_QUEUE, exchange=constants.DFA_EXCHANGE)
def function[setup_client_rpc, parameter[self]]: constant[Setup RPC client for dfa agent.] name[self].clnt assign[=] call[name[rpc].DfaRpcClient, parameter[name[self]._url, name[constants].DFA_SERVER_QUEUE]]
keyword[def] identifier[setup_client_rpc] ( identifier[self] ): literal[string] identifier[self] . identifier[clnt] = identifier[rpc] . identifier[DfaRpcClient] ( identifier[self] . identifier[_url] , identifier[constants] . identifier[DFA_SERVER_QUEUE] , identifier[exchange] = identifier[constants] . identifier[DFA_EXCHANGE] )
def setup_client_rpc(self): """Setup RPC client for dfa agent.""" # Setup RPC client. self.clnt = rpc.DfaRpcClient(self._url, constants.DFA_SERVER_QUEUE, exchange=constants.DFA_EXCHANGE)
def parseSpectra(self): """ #TODO: docstring :returns: #TODO: docstring """ #Note: the spectra need to be iterated completely to save the #metadataNode if self._parsed: raise TypeError('Mzml file already parsed.') self._parsed = True return self._parseMzml()
def function[parseSpectra, parameter[self]]: constant[ #TODO: docstring :returns: #TODO: docstring ] if name[self]._parsed begin[:] <ast.Raise object at 0x7da18c4cf610> name[self]._parsed assign[=] constant[True] return[call[name[self]._parseMzml, parameter[]]]
keyword[def] identifier[parseSpectra] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_parsed] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[self] . identifier[_parsed] = keyword[True] keyword[return] identifier[self] . identifier[_parseMzml] ()
def parseSpectra(self): """ #TODO: docstring :returns: #TODO: docstring """ #Note: the spectra need to be iterated completely to save the #metadataNode if self._parsed: raise TypeError('Mzml file already parsed.') # depends on [control=['if'], data=[]] self._parsed = True return self._parseMzml()
def isdir(path, message): """ Raise an exception if the given directory does not exist. :param path: The path to a directory to be tested :param message: A custom message to report in the exception :raises: FileNotFoundError """ if not os.path.isdir(path): raise FileNotFoundError( errno.ENOENT, "{}: {}".format(message, os.strerror(errno.ENOENT)), path)
def function[isdir, parameter[path, message]]: constant[ Raise an exception if the given directory does not exist. :param path: The path to a directory to be tested :param message: A custom message to report in the exception :raises: FileNotFoundError ] if <ast.UnaryOp object at 0x7da207f9a0b0> begin[:] <ast.Raise object at 0x7da204565c90>
keyword[def] identifier[isdir] ( identifier[path] , identifier[message] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ): keyword[raise] identifier[FileNotFoundError] ( identifier[errno] . identifier[ENOENT] , literal[string] . identifier[format] ( identifier[message] , identifier[os] . identifier[strerror] ( identifier[errno] . identifier[ENOENT] )), identifier[path] )
def isdir(path, message): """ Raise an exception if the given directory does not exist. :param path: The path to a directory to be tested :param message: A custom message to report in the exception :raises: FileNotFoundError """ if not os.path.isdir(path): raise FileNotFoundError(errno.ENOENT, '{}: {}'.format(message, os.strerror(errno.ENOENT)), path) # depends on [control=['if'], data=[]]
def download_ts(self, path, chunk, process_last_line=True): """ This will look for a download ts link. It will then download that file and replace the link with the local file. :param process_last_line: :param path: str of the path to put the file :param chunk: str of the chunk file, note this could have partial lines :return: str of the chunk with the local file link """ import glob ret_chunk = [] partial_chunk = '' lines = chunk.strip().split('\n') if not process_last_line: partial_chunk = lines.pop() for line in lines: if line.startswith('http:'): ts = '%s/%s.ts' % (path, line.split('.ts?')[0].split('/')[-1]) relative_ts = '%s/%s.ts' % ( path.split('/')[-1], line.split('.ts?')[0].split('/')[-1]) if not os.path.exists(ts): # this could be a repeat call # log.debug("Downloading: %s at %s" % (line, time.time())) gevent.spawn(ApiCall.save_url_to_file, line, ts).start() gevent.sleep(0) ret_chunk.append('# ' + line) ret_chunk.append(relative_ts) # log.debug("Done Downloading = %s"%time.time()) else: ret_chunk = [] # start over else: ret_chunk.append(line) if '#EXT-X-ENDLIST' in chunk: self.repeat_needed = 0 gevent.sleep(0) elif chunk.strip(): self.repeat_needed = 1 + len(glob.glob(path + '/*.ts')) ret_chunk = ret_chunk and '\n'.join(ret_chunk) + '\n' or '' return ret_chunk, partial_chunk
def function[download_ts, parameter[self, path, chunk, process_last_line]]: constant[ This will look for a download ts link. It will then download that file and replace the link with the local file. :param process_last_line: :param path: str of the path to put the file :param chunk: str of the chunk file, note this could have partial lines :return: str of the chunk with the local file link ] import module[glob] variable[ret_chunk] assign[=] list[[]] variable[partial_chunk] assign[=] constant[] variable[lines] assign[=] call[call[name[chunk].strip, parameter[]].split, parameter[constant[ ]]] if <ast.UnaryOp object at 0x7da1b16227d0> begin[:] variable[partial_chunk] assign[=] call[name[lines].pop, parameter[]] for taget[name[line]] in starred[name[lines]] begin[:] if call[name[line].startswith, parameter[constant[http:]]] begin[:] variable[ts] assign[=] binary_operation[constant[%s/%s.ts] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1621fc0>, <ast.Subscript object at 0x7da1b16214e0>]]] variable[relative_ts] assign[=] binary_operation[constant[%s/%s.ts] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1620f10>, <ast.Subscript object at 0x7da1b1620fa0>]]] if <ast.UnaryOp object at 0x7da1b1622d40> begin[:] call[call[name[gevent].spawn, parameter[name[ApiCall].save_url_to_file, name[line], name[ts]]].start, parameter[]] call[name[gevent].sleep, parameter[constant[0]]] call[name[ret_chunk].append, parameter[binary_operation[constant[# ] + name[line]]]] call[name[ret_chunk].append, parameter[name[relative_ts]]] if compare[constant[#EXT-X-ENDLIST] in name[chunk]] begin[:] name[self].repeat_needed assign[=] constant[0] call[name[gevent].sleep, parameter[constant[0]]] variable[ret_chunk] assign[=] <ast.BoolOp object at 0x7da20e9565c0> return[tuple[[<ast.Name object at 0x7da20e9578e0>, <ast.Name object at 0x7da20e957e20>]]]
keyword[def] identifier[download_ts] ( identifier[self] , identifier[path] , identifier[chunk] , identifier[process_last_line] = keyword[True] ): literal[string] keyword[import] identifier[glob] identifier[ret_chunk] =[] identifier[partial_chunk] = literal[string] identifier[lines] = identifier[chunk] . identifier[strip] (). identifier[split] ( literal[string] ) keyword[if] keyword[not] identifier[process_last_line] : identifier[partial_chunk] = identifier[lines] . identifier[pop] () keyword[for] identifier[line] keyword[in] identifier[lines] : keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[ts] = literal[string] %( identifier[path] , identifier[line] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[- literal[int] ]) identifier[relative_ts] = literal[string] %( identifier[path] . identifier[split] ( literal[string] )[- literal[int] ], identifier[line] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[- literal[int] ]) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[ts] ): identifier[gevent] . identifier[spawn] ( identifier[ApiCall] . identifier[save_url_to_file] , identifier[line] , identifier[ts] ). identifier[start] () identifier[gevent] . identifier[sleep] ( literal[int] ) identifier[ret_chunk] . identifier[append] ( literal[string] + identifier[line] ) identifier[ret_chunk] . identifier[append] ( identifier[relative_ts] ) keyword[else] : identifier[ret_chunk] =[] keyword[else] : identifier[ret_chunk] . identifier[append] ( identifier[line] ) keyword[if] literal[string] keyword[in] identifier[chunk] : identifier[self] . identifier[repeat_needed] = literal[int] identifier[gevent] . identifier[sleep] ( literal[int] ) keyword[elif] identifier[chunk] . identifier[strip] (): identifier[self] . identifier[repeat_needed] = literal[int] + identifier[len] ( identifier[glob] . identifier[glob] ( identifier[path] + literal[string] )) identifier[ret_chunk] = identifier[ret_chunk] keyword[and] literal[string] . identifier[join] ( identifier[ret_chunk] )+ literal[string] keyword[or] literal[string] keyword[return] identifier[ret_chunk] , identifier[partial_chunk]
def download_ts(self, path, chunk, process_last_line=True): """ This will look for a download ts link. It will then download that file and replace the link with the local file. :param process_last_line: :param path: str of the path to put the file :param chunk: str of the chunk file, note this could have partial lines :return: str of the chunk with the local file link """ import glob ret_chunk = [] partial_chunk = '' lines = chunk.strip().split('\n') if not process_last_line: partial_chunk = lines.pop() # depends on [control=['if'], data=[]] for line in lines: if line.startswith('http:'): ts = '%s/%s.ts' % (path, line.split('.ts?')[0].split('/')[-1]) relative_ts = '%s/%s.ts' % (path.split('/')[-1], line.split('.ts?')[0].split('/')[-1]) if not os.path.exists(ts): # this could be a repeat call # log.debug("Downloading: %s at %s" % (line, time.time())) gevent.spawn(ApiCall.save_url_to_file, line, ts).start() gevent.sleep(0) ret_chunk.append('# ' + line) ret_chunk.append(relative_ts) # depends on [control=['if'], data=[]] else: # log.debug("Done Downloading = %s"%time.time()) ret_chunk = [] # start over # depends on [control=['if'], data=[]] else: ret_chunk.append(line) # depends on [control=['for'], data=['line']] if '#EXT-X-ENDLIST' in chunk: self.repeat_needed = 0 gevent.sleep(0) # depends on [control=['if'], data=[]] elif chunk.strip(): self.repeat_needed = 1 + len(glob.glob(path + '/*.ts')) # depends on [control=['if'], data=[]] ret_chunk = ret_chunk and '\n'.join(ret_chunk) + '\n' or '' return (ret_chunk, partial_chunk)
def list_courses(self): """ List enrolled courses. @return: List of enrolled courses. @rtype: [str] """ course = CourseraOnDemand(session=self._session, course_id=None, course_name=None) return course.list_courses()
def function[list_courses, parameter[self]]: constant[ List enrolled courses. @return: List of enrolled courses. @rtype: [str] ] variable[course] assign[=] call[name[CourseraOnDemand], parameter[]] return[call[name[course].list_courses, parameter[]]]
keyword[def] identifier[list_courses] ( identifier[self] ): literal[string] identifier[course] = identifier[CourseraOnDemand] ( identifier[session] = identifier[self] . identifier[_session] , identifier[course_id] = keyword[None] , identifier[course_name] = keyword[None] ) keyword[return] identifier[course] . identifier[list_courses] ()
def list_courses(self): """ List enrolled courses. @return: List of enrolled courses. @rtype: [str] """ course = CourseraOnDemand(session=self._session, course_id=None, course_name=None) return course.list_courses()
def new_filter( self, contract_address: Address, topics: List[str] = None, from_block: BlockSpecification = 0, to_block: BlockSpecification = 'latest', ) -> StatelessFilter: """ Create a filter in the ethereum node. """ logs_blocks_sanity_check(from_block, to_block) return StatelessFilter( self.web3, { 'fromBlock': from_block, 'toBlock': to_block, 'address': to_checksum_address(contract_address), 'topics': topics, }, )
def function[new_filter, parameter[self, contract_address, topics, from_block, to_block]]: constant[ Create a filter in the ethereum node. ] call[name[logs_blocks_sanity_check], parameter[name[from_block], name[to_block]]] return[call[name[StatelessFilter], parameter[name[self].web3, dictionary[[<ast.Constant object at 0x7da1b1712950>, <ast.Constant object at 0x7da1b1712bc0>, <ast.Constant object at 0x7da1b17103a0>, <ast.Constant object at 0x7da1b1712230>], [<ast.Name object at 0x7da1b17107f0>, <ast.Name object at 0x7da1b1710d30>, <ast.Call object at 0x7da1b1712710>, <ast.Name object at 0x7da1b1712e30>]]]]]
keyword[def] identifier[new_filter] ( identifier[self] , identifier[contract_address] : identifier[Address] , identifier[topics] : identifier[List] [ identifier[str] ]= keyword[None] , identifier[from_block] : identifier[BlockSpecification] = literal[int] , identifier[to_block] : identifier[BlockSpecification] = literal[string] , )-> identifier[StatelessFilter] : literal[string] identifier[logs_blocks_sanity_check] ( identifier[from_block] , identifier[to_block] ) keyword[return] identifier[StatelessFilter] ( identifier[self] . identifier[web3] , { literal[string] : identifier[from_block] , literal[string] : identifier[to_block] , literal[string] : identifier[to_checksum_address] ( identifier[contract_address] ), literal[string] : identifier[topics] , }, )
def new_filter(self, contract_address: Address, topics: List[str]=None, from_block: BlockSpecification=0, to_block: BlockSpecification='latest') -> StatelessFilter: """ Create a filter in the ethereum node. """ logs_blocks_sanity_check(from_block, to_block) return StatelessFilter(self.web3, {'fromBlock': from_block, 'toBlock': to_block, 'address': to_checksum_address(contract_address), 'topics': topics})
def IMTF(v): """In place inverse move to front transform. """ #mtf is initialized virtually with range(infinity) mtf = [] for i, vi in enumerate(v): #get old value from mtf. If never seen, take virtual value try: value = mtf.pop(vi) except IndexError: value = vi #put value at front mtf.insert(0, value) #replace transformed value v[i] = value
def function[IMTF, parameter[v]]: constant[In place inverse move to front transform. ] variable[mtf] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18eb54c40>, <ast.Name object at 0x7da20e9b35b0>]]] in starred[call[name[enumerate], parameter[name[v]]]] begin[:] <ast.Try object at 0x7da20e9b0e80> call[name[mtf].insert, parameter[constant[0], name[value]]] call[name[v]][name[i]] assign[=] name[value]
keyword[def] identifier[IMTF] ( identifier[v] ): literal[string] identifier[mtf] =[] keyword[for] identifier[i] , identifier[vi] keyword[in] identifier[enumerate] ( identifier[v] ): keyword[try] : identifier[value] = identifier[mtf] . identifier[pop] ( identifier[vi] ) keyword[except] identifier[IndexError] : identifier[value] = identifier[vi] identifier[mtf] . identifier[insert] ( literal[int] , identifier[value] ) identifier[v] [ identifier[i] ]= identifier[value]
def IMTF(v): """In place inverse move to front transform. """ #mtf is initialized virtually with range(infinity) mtf = [] for (i, vi) in enumerate(v): #get old value from mtf. If never seen, take virtual value try: value = mtf.pop(vi) # depends on [control=['try'], data=[]] except IndexError: value = vi # depends on [control=['except'], data=[]] #put value at front mtf.insert(0, value) #replace transformed value v[i] = value # depends on [control=['for'], data=[]]
def base_image_inspect(self): """ inspect base image :return: dict """ if self._base_image_inspect is None: if self.base_from_scratch: self._base_image_inspect = {} elif self.parents_pulled or self.custom_base_image: try: self._base_image_inspect = self.tasker.inspect_image(self.base_image) except docker.errors.NotFound: # If the base image cannot be found throw KeyError - # as this property should behave like a dict raise KeyError("Unprocessed base image Dockerfile cannot be inspected") else: self._base_image_inspect =\ atomic_reactor.util.get_inspect_for_image(self.base_image, self.base_image.registry, self.base_image_insecure, self.base_image_dockercfg_path) base_image_str = str(self.base_image) if base_image_str not in self._parent_images_inspect: self._parent_images_inspect[base_image_str] = self._base_image_inspect return self._base_image_inspect
def function[base_image_inspect, parameter[self]]: constant[ inspect base image :return: dict ] if compare[name[self]._base_image_inspect is constant[None]] begin[:] if name[self].base_from_scratch begin[:] name[self]._base_image_inspect assign[=] dictionary[[], []] variable[base_image_str] assign[=] call[name[str], parameter[name[self].base_image]] if compare[name[base_image_str] <ast.NotIn object at 0x7da2590d7190> name[self]._parent_images_inspect] begin[:] call[name[self]._parent_images_inspect][name[base_image_str]] assign[=] name[self]._base_image_inspect return[name[self]._base_image_inspect]
keyword[def] identifier[base_image_inspect] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_base_image_inspect] keyword[is] keyword[None] : keyword[if] identifier[self] . identifier[base_from_scratch] : identifier[self] . identifier[_base_image_inspect] ={} keyword[elif] identifier[self] . identifier[parents_pulled] keyword[or] identifier[self] . identifier[custom_base_image] : keyword[try] : identifier[self] . identifier[_base_image_inspect] = identifier[self] . identifier[tasker] . identifier[inspect_image] ( identifier[self] . identifier[base_image] ) keyword[except] identifier[docker] . identifier[errors] . identifier[NotFound] : keyword[raise] identifier[KeyError] ( literal[string] ) keyword[else] : identifier[self] . identifier[_base_image_inspect] = identifier[atomic_reactor] . identifier[util] . identifier[get_inspect_for_image] ( identifier[self] . identifier[base_image] , identifier[self] . identifier[base_image] . identifier[registry] , identifier[self] . identifier[base_image_insecure] , identifier[self] . identifier[base_image_dockercfg_path] ) identifier[base_image_str] = identifier[str] ( identifier[self] . identifier[base_image] ) keyword[if] identifier[base_image_str] keyword[not] keyword[in] identifier[self] . identifier[_parent_images_inspect] : identifier[self] . identifier[_parent_images_inspect] [ identifier[base_image_str] ]= identifier[self] . identifier[_base_image_inspect] keyword[return] identifier[self] . identifier[_base_image_inspect]
def base_image_inspect(self): """ inspect base image :return: dict """ if self._base_image_inspect is None: if self.base_from_scratch: self._base_image_inspect = {} # depends on [control=['if'], data=[]] elif self.parents_pulled or self.custom_base_image: try: self._base_image_inspect = self.tasker.inspect_image(self.base_image) # depends on [control=['try'], data=[]] except docker.errors.NotFound: # If the base image cannot be found throw KeyError - # as this property should behave like a dict raise KeyError('Unprocessed base image Dockerfile cannot be inspected') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: self._base_image_inspect = atomic_reactor.util.get_inspect_for_image(self.base_image, self.base_image.registry, self.base_image_insecure, self.base_image_dockercfg_path) base_image_str = str(self.base_image) if base_image_str not in self._parent_images_inspect: self._parent_images_inspect[base_image_str] = self._base_image_inspect # depends on [control=['if'], data=['base_image_str']] # depends on [control=['if'], data=[]] return self._base_image_inspect
def save_copy_as(self, index=None): """Save copy of file as... Args: index: self.data index for the file to save. Returns: False if no file name was selected or if save() was unsuccessful. True is save() was successful. Gets the new file name from select_savename(). If no name is chosen, then the save_copy_as() aborts. Otherwise, the current stack is checked to see if the selected name already exists and, if so, then the tab with that name is closed. Unlike save_as(), this calls write() directly instead of using save(). The current file and tab aren't changed at all. The copied file is opened in a new tab. """ if index is None: # Save the currently edited file index = self.get_stack_index() finfo = self.data[index] original_filename = finfo.filename filename = self.select_savename(original_filename) if filename: ao_index = self.has_filename(filename) # Note: ao_index == index --> saving an untitled file if ao_index is not None and ao_index != index: if not self.close_file(ao_index): return if ao_index < index: index -= 1 try: self._write_to_file(finfo, filename) # open created copy file self.plugin_load.emit(filename) return True except EnvironmentError as error: self.msgbox = QMessageBox( QMessageBox.Critical, _("Save Error"), _("<b>Unable to save file '%s'</b>" "<br><br>Error message:<br>%s" ) % (osp.basename(finfo.filename), str(error)), parent=self) self.msgbox.exec_() else: return False
def function[save_copy_as, parameter[self, index]]: constant[Save copy of file as... Args: index: self.data index for the file to save. Returns: False if no file name was selected or if save() was unsuccessful. True is save() was successful. Gets the new file name from select_savename(). If no name is chosen, then the save_copy_as() aborts. Otherwise, the current stack is checked to see if the selected name already exists and, if so, then the tab with that name is closed. Unlike save_as(), this calls write() directly instead of using save(). The current file and tab aren't changed at all. The copied file is opened in a new tab. ] if compare[name[index] is constant[None]] begin[:] variable[index] assign[=] call[name[self].get_stack_index, parameter[]] variable[finfo] assign[=] call[name[self].data][name[index]] variable[original_filename] assign[=] name[finfo].filename variable[filename] assign[=] call[name[self].select_savename, parameter[name[original_filename]]] if name[filename] begin[:] variable[ao_index] assign[=] call[name[self].has_filename, parameter[name[filename]]] if <ast.BoolOp object at 0x7da20e9b2860> begin[:] if <ast.UnaryOp object at 0x7da20e9b27a0> begin[:] return[None] if compare[name[ao_index] less[<] name[index]] begin[:] <ast.AugAssign object at 0x7da20e9b0760> <ast.Try object at 0x7da20e9b1870>
keyword[def] identifier[save_copy_as] ( identifier[self] , identifier[index] = keyword[None] ): literal[string] keyword[if] identifier[index] keyword[is] keyword[None] : identifier[index] = identifier[self] . identifier[get_stack_index] () identifier[finfo] = identifier[self] . identifier[data] [ identifier[index] ] identifier[original_filename] = identifier[finfo] . identifier[filename] identifier[filename] = identifier[self] . identifier[select_savename] ( identifier[original_filename] ) keyword[if] identifier[filename] : identifier[ao_index] = identifier[self] . identifier[has_filename] ( identifier[filename] ) keyword[if] identifier[ao_index] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ao_index] != identifier[index] : keyword[if] keyword[not] identifier[self] . identifier[close_file] ( identifier[ao_index] ): keyword[return] keyword[if] identifier[ao_index] < identifier[index] : identifier[index] -= literal[int] keyword[try] : identifier[self] . identifier[_write_to_file] ( identifier[finfo] , identifier[filename] ) identifier[self] . identifier[plugin_load] . identifier[emit] ( identifier[filename] ) keyword[return] keyword[True] keyword[except] identifier[EnvironmentError] keyword[as] identifier[error] : identifier[self] . identifier[msgbox] = identifier[QMessageBox] ( identifier[QMessageBox] . identifier[Critical] , identifier[_] ( literal[string] ), identifier[_] ( literal[string] literal[string] )%( identifier[osp] . identifier[basename] ( identifier[finfo] . identifier[filename] ), identifier[str] ( identifier[error] )), identifier[parent] = identifier[self] ) identifier[self] . identifier[msgbox] . identifier[exec_] () keyword[else] : keyword[return] keyword[False]
def save_copy_as(self, index=None): """Save copy of file as... Args: index: self.data index for the file to save. Returns: False if no file name was selected or if save() was unsuccessful. True is save() was successful. Gets the new file name from select_savename(). If no name is chosen, then the save_copy_as() aborts. Otherwise, the current stack is checked to see if the selected name already exists and, if so, then the tab with that name is closed. Unlike save_as(), this calls write() directly instead of using save(). The current file and tab aren't changed at all. The copied file is opened in a new tab. """ if index is None: # Save the currently edited file index = self.get_stack_index() # depends on [control=['if'], data=['index']] finfo = self.data[index] original_filename = finfo.filename filename = self.select_savename(original_filename) if filename: ao_index = self.has_filename(filename) # Note: ao_index == index --> saving an untitled file if ao_index is not None and ao_index != index: if not self.close_file(ao_index): return # depends on [control=['if'], data=[]] if ao_index < index: index -= 1 # depends on [control=['if'], data=['index']] # depends on [control=['if'], data=[]] try: self._write_to_file(finfo, filename) # open created copy file self.plugin_load.emit(filename) return True # depends on [control=['try'], data=[]] except EnvironmentError as error: self.msgbox = QMessageBox(QMessageBox.Critical, _('Save Error'), _("<b>Unable to save file '%s'</b><br><br>Error message:<br>%s") % (osp.basename(finfo.filename), str(error)), parent=self) self.msgbox.exec_() # depends on [control=['except'], data=['error']] # depends on [control=['if'], data=[]] else: return False
def pretty(obj, verbose=False, max_width=79, newline='\n'): """ Pretty print the object's representation. """ stream = StringIO() printer = RepresentationPrinter(stream, verbose, max_width, newline) printer.pretty(obj) printer.flush() return stream.getvalue()
def function[pretty, parameter[obj, verbose, max_width, newline]]: constant[ Pretty print the object's representation. ] variable[stream] assign[=] call[name[StringIO], parameter[]] variable[printer] assign[=] call[name[RepresentationPrinter], parameter[name[stream], name[verbose], name[max_width], name[newline]]] call[name[printer].pretty, parameter[name[obj]]] call[name[printer].flush, parameter[]] return[call[name[stream].getvalue, parameter[]]]
keyword[def] identifier[pretty] ( identifier[obj] , identifier[verbose] = keyword[False] , identifier[max_width] = literal[int] , identifier[newline] = literal[string] ): literal[string] identifier[stream] = identifier[StringIO] () identifier[printer] = identifier[RepresentationPrinter] ( identifier[stream] , identifier[verbose] , identifier[max_width] , identifier[newline] ) identifier[printer] . identifier[pretty] ( identifier[obj] ) identifier[printer] . identifier[flush] () keyword[return] identifier[stream] . identifier[getvalue] ()
def pretty(obj, verbose=False, max_width=79, newline='\n'): """ Pretty print the object's representation. """ stream = StringIO() printer = RepresentationPrinter(stream, verbose, max_width, newline) printer.pretty(obj) printer.flush() return stream.getvalue()
def header(self, text, level, raw=None): """Rendering header/heading tags like ``<h1>`` ``<h2>``. :param text: rendered text content for the header. :param level: a number for the header level, for example: 1. :param raw: raw text content of the header. """ return '\n{0}\n{1}\n'.format(text, self.hmarks[level] * column_width(text))
def function[header, parameter[self, text, level, raw]]: constant[Rendering header/heading tags like ``<h1>`` ``<h2>``. :param text: rendered text content for the header. :param level: a number for the header level, for example: 1. :param raw: raw text content of the header. ] return[call[constant[ {0} {1} ].format, parameter[name[text], binary_operation[call[name[self].hmarks][name[level]] * call[name[column_width], parameter[name[text]]]]]]]
keyword[def] identifier[header] ( identifier[self] , identifier[text] , identifier[level] , identifier[raw] = keyword[None] ): literal[string] keyword[return] literal[string] . identifier[format] ( identifier[text] , identifier[self] . identifier[hmarks] [ identifier[level] ]* identifier[column_width] ( identifier[text] ))
def header(self, text, level, raw=None): """Rendering header/heading tags like ``<h1>`` ``<h2>``. :param text: rendered text content for the header. :param level: a number for the header level, for example: 1. :param raw: raw text content of the header. """ return '\n{0}\n{1}\n'.format(text, self.hmarks[level] * column_width(text))
def get_readline_tail(self, n=10): """Get the last n items in readline history.""" end = self.shell.readline.get_current_history_length() + 1 start = max(end-n, 1) ghi = self.shell.readline.get_history_item return [ghi(x) for x in range(start, end)]
def function[get_readline_tail, parameter[self, n]]: constant[Get the last n items in readline history.] variable[end] assign[=] binary_operation[call[name[self].shell.readline.get_current_history_length, parameter[]] + constant[1]] variable[start] assign[=] call[name[max], parameter[binary_operation[name[end] - name[n]], constant[1]]] variable[ghi] assign[=] name[self].shell.readline.get_history_item return[<ast.ListComp object at 0x7da18ede7a00>]
keyword[def] identifier[get_readline_tail] ( identifier[self] , identifier[n] = literal[int] ): literal[string] identifier[end] = identifier[self] . identifier[shell] . identifier[readline] . identifier[get_current_history_length] ()+ literal[int] identifier[start] = identifier[max] ( identifier[end] - identifier[n] , literal[int] ) identifier[ghi] = identifier[self] . identifier[shell] . identifier[readline] . identifier[get_history_item] keyword[return] [ identifier[ghi] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[start] , identifier[end] )]
def get_readline_tail(self, n=10): """Get the last n items in readline history.""" end = self.shell.readline.get_current_history_length() + 1 start = max(end - n, 1) ghi = self.shell.readline.get_history_item return [ghi(x) for x in range(start, end)]
def SwitchToAlert(): ''' <input value="Test" type="button" onClick="alert('OK')" > ''' try: alert = WebDriverWait(Web.driver, 10).until(lambda driver: driver.switch_to_alert()) return alert except: print("Waring: Timeout at %d seconds.Alert was not found.") return False
def function[SwitchToAlert, parameter[]]: constant[ <input value="Test" type="button" onClick="alert('OK')" > ] <ast.Try object at 0x7da18f09e680>
keyword[def] identifier[SwitchToAlert] (): literal[string] keyword[try] : identifier[alert] = identifier[WebDriverWait] ( identifier[Web] . identifier[driver] , literal[int] ). identifier[until] ( keyword[lambda] identifier[driver] : identifier[driver] . identifier[switch_to_alert] ()) keyword[return] identifier[alert] keyword[except] : identifier[print] ( literal[string] ) keyword[return] keyword[False]
def SwitchToAlert(): """ <input value="Test" type="button" onClick="alert('OK')" > """ try: alert = WebDriverWait(Web.driver, 10).until(lambda driver: driver.switch_to_alert()) return alert # depends on [control=['try'], data=[]] except: print('Waring: Timeout at %d seconds.Alert was not found.') return False # depends on [control=['except'], data=[]]
def validate_attribute(attr, name, expected_type=None, required=False): '''Validates that an attribute meets expectations. This function will check if the given attribute value matches a necessary type and/or is not None, an empty string, an empty list, etc. It will raise suitable exceptions on validation failure. @param attr The value to validate. @param name The attribute name to use in exceptions. @param expected_type The type the value must be. If None, no check is performed. If a list, attr must match one type in the list. @param required If the value must not be empty, e.g. not an empty string. @raises InvalidTypeError @raises RequiredAttributeError ''' if expected_type: if type(expected_type) == list: if not _check_type(attr, expected_type): raise InvalidTypeError(name, type(attr), expected_type) else: if not _check_type(attr, [expected_type]): raise InvalidTypeError(name, type(attr), expected_type) if required and not attr: raise RequiredAttributeError(name)
def function[validate_attribute, parameter[attr, name, expected_type, required]]: constant[Validates that an attribute meets expectations. This function will check if the given attribute value matches a necessary type and/or is not None, an empty string, an empty list, etc. It will raise suitable exceptions on validation failure. @param attr The value to validate. @param name The attribute name to use in exceptions. @param expected_type The type the value must be. If None, no check is performed. If a list, attr must match one type in the list. @param required If the value must not be empty, e.g. not an empty string. @raises InvalidTypeError @raises RequiredAttributeError ] if name[expected_type] begin[:] if compare[call[name[type], parameter[name[expected_type]]] equal[==] name[list]] begin[:] if <ast.UnaryOp object at 0x7da1b09eed70> begin[:] <ast.Raise object at 0x7da1b09ee530> if <ast.BoolOp object at 0x7da1b09ed6c0> begin[:] <ast.Raise object at 0x7da1b09ec0a0>
keyword[def] identifier[validate_attribute] ( identifier[attr] , identifier[name] , identifier[expected_type] = keyword[None] , identifier[required] = keyword[False] ): literal[string] keyword[if] identifier[expected_type] : keyword[if] identifier[type] ( identifier[expected_type] )== identifier[list] : keyword[if] keyword[not] identifier[_check_type] ( identifier[attr] , identifier[expected_type] ): keyword[raise] identifier[InvalidTypeError] ( identifier[name] , identifier[type] ( identifier[attr] ), identifier[expected_type] ) keyword[else] : keyword[if] keyword[not] identifier[_check_type] ( identifier[attr] ,[ identifier[expected_type] ]): keyword[raise] identifier[InvalidTypeError] ( identifier[name] , identifier[type] ( identifier[attr] ), identifier[expected_type] ) keyword[if] identifier[required] keyword[and] keyword[not] identifier[attr] : keyword[raise] identifier[RequiredAttributeError] ( identifier[name] )
def validate_attribute(attr, name, expected_type=None, required=False): """Validates that an attribute meets expectations. This function will check if the given attribute value matches a necessary type and/or is not None, an empty string, an empty list, etc. It will raise suitable exceptions on validation failure. @param attr The value to validate. @param name The attribute name to use in exceptions. @param expected_type The type the value must be. If None, no check is performed. If a list, attr must match one type in the list. @param required If the value must not be empty, e.g. not an empty string. @raises InvalidTypeError @raises RequiredAttributeError """ if expected_type: if type(expected_type) == list: if not _check_type(attr, expected_type): raise InvalidTypeError(name, type(attr), expected_type) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif not _check_type(attr, [expected_type]): raise InvalidTypeError(name, type(attr), expected_type) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if required and (not attr): raise RequiredAttributeError(name) # depends on [control=['if'], data=[]]
def _original_frame(self, x, y): """ Return x/y in the original frame, based on a guess as much as anything. :param x: x pixel coordinate :type x: float :param y: y pixel coordinate :type y: float :return: x,y :rtype: float, float """ if self._inverted: return self.obs.naxis1 - x, self.obs.naxis2 - y return x, y
def function[_original_frame, parameter[self, x, y]]: constant[ Return x/y in the original frame, based on a guess as much as anything. :param x: x pixel coordinate :type x: float :param y: y pixel coordinate :type y: float :return: x,y :rtype: float, float ] if name[self]._inverted begin[:] return[tuple[[<ast.BinOp object at 0x7da1b196ae60>, <ast.BinOp object at 0x7da1b1969900>]]] return[tuple[[<ast.Name object at 0x7da1b1969c90>, <ast.Name object at 0x7da1b196a320>]]]
keyword[def] identifier[_original_frame] ( identifier[self] , identifier[x] , identifier[y] ): literal[string] keyword[if] identifier[self] . identifier[_inverted] : keyword[return] identifier[self] . identifier[obs] . identifier[naxis1] - identifier[x] , identifier[self] . identifier[obs] . identifier[naxis2] - identifier[y] keyword[return] identifier[x] , identifier[y]
def _original_frame(self, x, y): """ Return x/y in the original frame, based on a guess as much as anything. :param x: x pixel coordinate :type x: float :param y: y pixel coordinate :type y: float :return: x,y :rtype: float, float """ if self._inverted: return (self.obs.naxis1 - x, self.obs.naxis2 - y) # depends on [control=['if'], data=[]] return (x, y)
def set_range_value(self, data): """ Validates date range by parsing into 2 datetime objects and validating them both. """ dtfrom = data.pop('value_from') dtto = data.pop('value_to') if dtfrom is dtto is None: self.errors['value'] = ['Date range requires values'] raise forms.ValidationError([]) data['value'] = (dtfrom, dtto)
def function[set_range_value, parameter[self, data]]: constant[ Validates date range by parsing into 2 datetime objects and validating them both. ] variable[dtfrom] assign[=] call[name[data].pop, parameter[constant[value_from]]] variable[dtto] assign[=] call[name[data].pop, parameter[constant[value_to]]] if compare[name[dtfrom] is name[dtto]] begin[:] call[name[self].errors][constant[value]] assign[=] list[[<ast.Constant object at 0x7da18c4cc490>]] <ast.Raise object at 0x7da18c4ccd60> call[name[data]][constant[value]] assign[=] tuple[[<ast.Name object at 0x7da18c4cdf00>, <ast.Name object at 0x7da18c4cfd90>]]
keyword[def] identifier[set_range_value] ( identifier[self] , identifier[data] ): literal[string] identifier[dtfrom] = identifier[data] . identifier[pop] ( literal[string] ) identifier[dtto] = identifier[data] . identifier[pop] ( literal[string] ) keyword[if] identifier[dtfrom] keyword[is] identifier[dtto] keyword[is] keyword[None] : identifier[self] . identifier[errors] [ literal[string] ]=[ literal[string] ] keyword[raise] identifier[forms] . identifier[ValidationError] ([]) identifier[data] [ literal[string] ]=( identifier[dtfrom] , identifier[dtto] )
def set_range_value(self, data): """ Validates date range by parsing into 2 datetime objects and validating them both. """ dtfrom = data.pop('value_from') dtto = data.pop('value_to') if dtfrom is dtto is None: self.errors['value'] = ['Date range requires values'] raise forms.ValidationError([]) # depends on [control=['if'], data=[]] data['value'] = (dtfrom, dtto)
def load_raw_data(assets, data_query_cutoff_times, expr, odo_kwargs, checkpoints=None): """ Given an expression representing data to load, perform normalization and forward-filling and return the data, materialized. Only accepts data with a `sid` field. Parameters ---------- assets : pd.int64index the assets to load data for. data_query_cutoff_times : pd.DatetimeIndex The datetime when data should no longer be considered available for a session. expr : expr the expression representing the data to load. odo_kwargs : dict extra keyword arguments to pass to odo when executing the expression. checkpoints : expr, optional the expression representing the checkpointed data for `expr`. Returns ------- raw : pd.dataframe The result of computing expr and materializing the result as a dataframe. """ lower_dt, upper_dt = data_query_cutoff_times[[0, -1]] raw = ffill_query_in_range( expr, lower_dt, upper_dt, checkpoints=checkpoints, odo_kwargs=odo_kwargs, ) sids = raw[SID_FIELD_NAME] raw.drop( sids[~sids.isin(assets)].index, inplace=True ) return raw
def function[load_raw_data, parameter[assets, data_query_cutoff_times, expr, odo_kwargs, checkpoints]]: constant[ Given an expression representing data to load, perform normalization and forward-filling and return the data, materialized. Only accepts data with a `sid` field. Parameters ---------- assets : pd.int64index the assets to load data for. data_query_cutoff_times : pd.DatetimeIndex The datetime when data should no longer be considered available for a session. expr : expr the expression representing the data to load. odo_kwargs : dict extra keyword arguments to pass to odo when executing the expression. checkpoints : expr, optional the expression representing the checkpointed data for `expr`. Returns ------- raw : pd.dataframe The result of computing expr and materializing the result as a dataframe. ] <ast.Tuple object at 0x7da18bc73d30> assign[=] call[name[data_query_cutoff_times]][list[[<ast.Constant object at 0x7da1b2005a20>, <ast.UnaryOp object at 0x7da1b20062f0>]]] variable[raw] assign[=] call[name[ffill_query_in_range], parameter[name[expr], name[lower_dt], name[upper_dt]]] variable[sids] assign[=] call[name[raw]][name[SID_FIELD_NAME]] call[name[raw].drop, parameter[call[name[sids]][<ast.UnaryOp object at 0x7da1b2005180>].index]] return[name[raw]]
keyword[def] identifier[load_raw_data] ( identifier[assets] , identifier[data_query_cutoff_times] , identifier[expr] , identifier[odo_kwargs] , identifier[checkpoints] = keyword[None] ): literal[string] identifier[lower_dt] , identifier[upper_dt] = identifier[data_query_cutoff_times] [[ literal[int] ,- literal[int] ]] identifier[raw] = identifier[ffill_query_in_range] ( identifier[expr] , identifier[lower_dt] , identifier[upper_dt] , identifier[checkpoints] = identifier[checkpoints] , identifier[odo_kwargs] = identifier[odo_kwargs] , ) identifier[sids] = identifier[raw] [ identifier[SID_FIELD_NAME] ] identifier[raw] . identifier[drop] ( identifier[sids] [~ identifier[sids] . identifier[isin] ( identifier[assets] )]. identifier[index] , identifier[inplace] = keyword[True] ) keyword[return] identifier[raw]
def load_raw_data(assets, data_query_cutoff_times, expr, odo_kwargs, checkpoints=None): """ Given an expression representing data to load, perform normalization and forward-filling and return the data, materialized. Only accepts data with a `sid` field. Parameters ---------- assets : pd.int64index the assets to load data for. data_query_cutoff_times : pd.DatetimeIndex The datetime when data should no longer be considered available for a session. expr : expr the expression representing the data to load. odo_kwargs : dict extra keyword arguments to pass to odo when executing the expression. checkpoints : expr, optional the expression representing the checkpointed data for `expr`. Returns ------- raw : pd.dataframe The result of computing expr and materializing the result as a dataframe. """ (lower_dt, upper_dt) = data_query_cutoff_times[[0, -1]] raw = ffill_query_in_range(expr, lower_dt, upper_dt, checkpoints=checkpoints, odo_kwargs=odo_kwargs) sids = raw[SID_FIELD_NAME] raw.drop(sids[~sids.isin(assets)].index, inplace=True) return raw
def namedb_get_num_history_rows( cur, history_id ): """ Get the history for a name or namespace from the history table. Use offset/count if given. """ ret = [] select_query = "SELECT COUNT(*) FROM history WHERE history_id = ? ORDER BY block_id ASC, vtxindex ASC;" args = (history_id,) count = namedb_select_count_rows( cur, select_query, args ) return count
def function[namedb_get_num_history_rows, parameter[cur, history_id]]: constant[ Get the history for a name or namespace from the history table. Use offset/count if given. ] variable[ret] assign[=] list[[]] variable[select_query] assign[=] constant[SELECT COUNT(*) FROM history WHERE history_id = ? ORDER BY block_id ASC, vtxindex ASC;] variable[args] assign[=] tuple[[<ast.Name object at 0x7da1b17d7850>]] variable[count] assign[=] call[name[namedb_select_count_rows], parameter[name[cur], name[select_query], name[args]]] return[name[count]]
keyword[def] identifier[namedb_get_num_history_rows] ( identifier[cur] , identifier[history_id] ): literal[string] identifier[ret] =[] identifier[select_query] = literal[string] identifier[args] =( identifier[history_id] ,) identifier[count] = identifier[namedb_select_count_rows] ( identifier[cur] , identifier[select_query] , identifier[args] ) keyword[return] identifier[count]
def namedb_get_num_history_rows(cur, history_id): """ Get the history for a name or namespace from the history table. Use offset/count if given. """ ret = [] select_query = 'SELECT COUNT(*) FROM history WHERE history_id = ? ORDER BY block_id ASC, vtxindex ASC;' args = (history_id,) count = namedb_select_count_rows(cur, select_query, args) return count
def dropColumnsFromRabaObjTable(self, name, lstFieldsToKeep) : "Removes columns from a RabaObj table. lstFieldsToKeep should not contain raba_id or json fileds" if len(lstFieldsToKeep) == 0 : raise ValueError("There are no fields to keep") cpy = name+'_copy' sqlFiledsStr = ', '.join(lstFieldsToKeep) self.createTable(cpy, 'raba_id INTEGER PRIMARY KEY AUTOINCREMENT, json, %s' % (sqlFiledsStr)) sql = "INSERT INTO %s SELECT %s FROM %s;" % (cpy, 'raba_id, json, %s' % sqlFiledsStr, name) self.execute(sql) self.dropTable(name) self.renameTable(cpy, name)
def function[dropColumnsFromRabaObjTable, parameter[self, name, lstFieldsToKeep]]: constant[Removes columns from a RabaObj table. lstFieldsToKeep should not contain raba_id or json fileds] if compare[call[name[len], parameter[name[lstFieldsToKeep]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b0a2b760> variable[cpy] assign[=] binary_operation[name[name] + constant[_copy]] variable[sqlFiledsStr] assign[=] call[constant[, ].join, parameter[name[lstFieldsToKeep]]] call[name[self].createTable, parameter[name[cpy], binary_operation[constant[raba_id INTEGER PRIMARY KEY AUTOINCREMENT, json, %s] <ast.Mod object at 0x7da2590d6920> name[sqlFiledsStr]]]] variable[sql] assign[=] binary_operation[constant[INSERT INTO %s SELECT %s FROM %s;] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0a2afe0>, <ast.BinOp object at 0x7da1b0a2b640>, <ast.Name object at 0x7da1b0a2b610>]]] call[name[self].execute, parameter[name[sql]]] call[name[self].dropTable, parameter[name[name]]] call[name[self].renameTable, parameter[name[cpy], name[name]]]
keyword[def] identifier[dropColumnsFromRabaObjTable] ( identifier[self] , identifier[name] , identifier[lstFieldsToKeep] ): literal[string] keyword[if] identifier[len] ( identifier[lstFieldsToKeep] )== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[cpy] = identifier[name] + literal[string] identifier[sqlFiledsStr] = literal[string] . identifier[join] ( identifier[lstFieldsToKeep] ) identifier[self] . identifier[createTable] ( identifier[cpy] , literal[string] %( identifier[sqlFiledsStr] )) identifier[sql] = literal[string] %( identifier[cpy] , literal[string] % identifier[sqlFiledsStr] , identifier[name] ) identifier[self] . identifier[execute] ( identifier[sql] ) identifier[self] . identifier[dropTable] ( identifier[name] ) identifier[self] . identifier[renameTable] ( identifier[cpy] , identifier[name] )
def dropColumnsFromRabaObjTable(self, name, lstFieldsToKeep): """Removes columns from a RabaObj table. lstFieldsToKeep should not contain raba_id or json fileds""" if len(lstFieldsToKeep) == 0: raise ValueError('There are no fields to keep') # depends on [control=['if'], data=[]] cpy = name + '_copy' sqlFiledsStr = ', '.join(lstFieldsToKeep) self.createTable(cpy, 'raba_id INTEGER PRIMARY KEY AUTOINCREMENT, json, %s' % sqlFiledsStr) sql = 'INSERT INTO %s SELECT %s FROM %s;' % (cpy, 'raba_id, json, %s' % sqlFiledsStr, name) self.execute(sql) self.dropTable(name) self.renameTable(cpy, name)
def produce_four_square_axes_explorer(four_square_axes, x_label=None, y_label=None, num_terms_semiotic_square=None, get_tooltip_content=None, x_axis_values=None, y_axis_values=None, color_func=None, axis_scaler=scale_neg_1_to_1_with_zero_mean, **kwargs): ''' Produces a semiotic square visualization. Parameters ---------- four_square : FourSquareAxes The basis of the visualization x_label : str The x-axis label in the scatter plot. Relationship between `category_a` and `category_b`. y_label The y-axis label in the scatter plot. Relationship neutral term and complex term. not_b_category_name: str or None Name of neutral set of data. Defaults to "Extra". num_terms_semiotic_square : int or None 10 by default. Number of terms to show in semiotic square. get_tooltip_content : str or None Defaults to tooltip showing z-scores on both axes. x_axis_values : list, default None Value-labels to show on x-axis. [-2.58, -1.96, 0, 1.96, 2.58] is the default y_axis_values : list, default None Value-labels to show on y-axis. [-2.58, -1.96, 0, 1.96, 2.58] is the default color_func : str, default None Javascript function to control color of a point. Function takes a parameter which is a dictionary entry produced by `ScatterChartExplorer.to_dict` and returns a string. Defaults to RdYlBl on x-axis, and varying saturation on y-axis. axis_scaler : lambda, default scale_neg_1_to_1_with_zero_mean_abs_max Scale values to fit axis Remaining arguments are from `produce_scattertext_explorer`. Returns ------- str, html of visualization ''' if x_label is None: x_label = four_square_axes.left_category_name_ + '-' + four_square_axes.right_category_name_ if y_label is None: y_label = four_square_axes.top_category_name_ + '-' + four_square_axes.bottom_category_name_ if get_tooltip_content is None: get_tooltip_content = '''(function(d) {return d.term + "<br/>%s: " + Math.round(d.ox*1000)/1000+"<br/>%s: " + Math.round(d.oy*1000)/1000})''' \ % (x_label, y_label) if color_func is None: # this desaturates # color_func = '(function(d) {var c = d3.hsl(d3.interpolateRdYlBu(d.x)); c.s *= d.y; return c;})' color_func = '(function(d) {return d3.interpolateRdYlBu(d.x)})' axes = four_square_axes.get_axes() if 'scores' not in kwargs: kwargs['scores'] = -axes['x'] ''' my_scaler = scale_neg_1_to_1_with_zero_mean_abs_max if foveate: my_scaler = scale_neg_1_to_1_with_zero_mean_rank_abs_max ''' return produce_scattertext_explorer( four_square_axes.term_doc_matrix_, category=four_square_axes.left_categories_[0], category_name=four_square_axes.left_category_name_, not_categories=four_square_axes.right_categories_, not_category_name=four_square_axes.right_category_name_, neutral_categories=four_square_axes.top_categories_, neutral_category_name=four_square_axes.top_category_name_, extra_categories=four_square_axes.bottom_categories_, extra_category_name=four_square_axes.bottom_category_name_, sort_by_dist=False, x_coords=axis_scaler(-axes['x']), y_coords=axis_scaler(axes['y']), original_x=-axes['x'], original_y=axes['y'], show_characteristic=False, show_top_terms=False, x_label=x_label, y_label=y_label, semiotic_square=four_square_axes, show_neutral=True, show_extra=True, num_terms_semiotic_square=num_terms_semiotic_square, get_tooltip_content=get_tooltip_content, x_axis_values=x_axis_values, y_axis_values=y_axis_values, color_func=color_func, show_axes=False, **kwargs )
def function[produce_four_square_axes_explorer, parameter[four_square_axes, x_label, y_label, num_terms_semiotic_square, get_tooltip_content, x_axis_values, y_axis_values, color_func, axis_scaler]]: constant[ Produces a semiotic square visualization. Parameters ---------- four_square : FourSquareAxes The basis of the visualization x_label : str The x-axis label in the scatter plot. Relationship between `category_a` and `category_b`. y_label The y-axis label in the scatter plot. Relationship neutral term and complex term. not_b_category_name: str or None Name of neutral set of data. Defaults to "Extra". num_terms_semiotic_square : int or None 10 by default. Number of terms to show in semiotic square. get_tooltip_content : str or None Defaults to tooltip showing z-scores on both axes. x_axis_values : list, default None Value-labels to show on x-axis. [-2.58, -1.96, 0, 1.96, 2.58] is the default y_axis_values : list, default None Value-labels to show on y-axis. [-2.58, -1.96, 0, 1.96, 2.58] is the default color_func : str, default None Javascript function to control color of a point. Function takes a parameter which is a dictionary entry produced by `ScatterChartExplorer.to_dict` and returns a string. Defaults to RdYlBl on x-axis, and varying saturation on y-axis. axis_scaler : lambda, default scale_neg_1_to_1_with_zero_mean_abs_max Scale values to fit axis Remaining arguments are from `produce_scattertext_explorer`. Returns ------- str, html of visualization ] if compare[name[x_label] is constant[None]] begin[:] variable[x_label] assign[=] binary_operation[binary_operation[name[four_square_axes].left_category_name_ + constant[-]] + name[four_square_axes].right_category_name_] if compare[name[y_label] is constant[None]] begin[:] variable[y_label] assign[=] binary_operation[binary_operation[name[four_square_axes].top_category_name_ + constant[-]] + name[four_square_axes].bottom_category_name_] if compare[name[get_tooltip_content] is constant[None]] begin[:] variable[get_tooltip_content] assign[=] binary_operation[constant[(function(d) {return d.term + "<br/>%s: " + Math.round(d.ox*1000)/1000+"<br/>%s: " + Math.round(d.oy*1000)/1000})] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1a374c0>, <ast.Name object at 0x7da1b1a37490>]]] if compare[name[color_func] is constant[None]] begin[:] variable[color_func] assign[=] constant[(function(d) {return d3.interpolateRdYlBu(d.x)})] variable[axes] assign[=] call[name[four_square_axes].get_axes, parameter[]] if compare[constant[scores] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:] call[name[kwargs]][constant[scores]] assign[=] <ast.UnaryOp object at 0x7da1b1a370a0> constant[ my_scaler = scale_neg_1_to_1_with_zero_mean_abs_max if foveate: my_scaler = scale_neg_1_to_1_with_zero_mean_rank_abs_max ] return[call[name[produce_scattertext_explorer], parameter[name[four_square_axes].term_doc_matrix_]]]
keyword[def] identifier[produce_four_square_axes_explorer] ( identifier[four_square_axes] , identifier[x_label] = keyword[None] , identifier[y_label] = keyword[None] , identifier[num_terms_semiotic_square] = keyword[None] , identifier[get_tooltip_content] = keyword[None] , identifier[x_axis_values] = keyword[None] , identifier[y_axis_values] = keyword[None] , identifier[color_func] = keyword[None] , identifier[axis_scaler] = identifier[scale_neg_1_to_1_with_zero_mean] , ** identifier[kwargs] ): literal[string] keyword[if] identifier[x_label] keyword[is] keyword[None] : identifier[x_label] = identifier[four_square_axes] . identifier[left_category_name_] + literal[string] + identifier[four_square_axes] . identifier[right_category_name_] keyword[if] identifier[y_label] keyword[is] keyword[None] : identifier[y_label] = identifier[four_square_axes] . identifier[top_category_name_] + literal[string] + identifier[four_square_axes] . identifier[bottom_category_name_] keyword[if] identifier[get_tooltip_content] keyword[is] keyword[None] : identifier[get_tooltip_content] = literal[string] %( identifier[x_label] , identifier[y_label] ) keyword[if] identifier[color_func] keyword[is] keyword[None] : identifier[color_func] = literal[string] identifier[axes] = identifier[four_square_axes] . identifier[get_axes] () keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]=- identifier[axes] [ literal[string] ] literal[string] keyword[return] identifier[produce_scattertext_explorer] ( identifier[four_square_axes] . identifier[term_doc_matrix_] , identifier[category] = identifier[four_square_axes] . identifier[left_categories_] [ literal[int] ], identifier[category_name] = identifier[four_square_axes] . identifier[left_category_name_] , identifier[not_categories] = identifier[four_square_axes] . identifier[right_categories_] , identifier[not_category_name] = identifier[four_square_axes] . identifier[right_category_name_] , identifier[neutral_categories] = identifier[four_square_axes] . identifier[top_categories_] , identifier[neutral_category_name] = identifier[four_square_axes] . identifier[top_category_name_] , identifier[extra_categories] = identifier[four_square_axes] . identifier[bottom_categories_] , identifier[extra_category_name] = identifier[four_square_axes] . identifier[bottom_category_name_] , identifier[sort_by_dist] = keyword[False] , identifier[x_coords] = identifier[axis_scaler] (- identifier[axes] [ literal[string] ]), identifier[y_coords] = identifier[axis_scaler] ( identifier[axes] [ literal[string] ]), identifier[original_x] =- identifier[axes] [ literal[string] ], identifier[original_y] = identifier[axes] [ literal[string] ], identifier[show_characteristic] = keyword[False] , identifier[show_top_terms] = keyword[False] , identifier[x_label] = identifier[x_label] , identifier[y_label] = identifier[y_label] , identifier[semiotic_square] = identifier[four_square_axes] , identifier[show_neutral] = keyword[True] , identifier[show_extra] = keyword[True] , identifier[num_terms_semiotic_square] = identifier[num_terms_semiotic_square] , identifier[get_tooltip_content] = identifier[get_tooltip_content] , identifier[x_axis_values] = identifier[x_axis_values] , identifier[y_axis_values] = identifier[y_axis_values] , identifier[color_func] = identifier[color_func] , identifier[show_axes] = keyword[False] , ** identifier[kwargs] )
def produce_four_square_axes_explorer(four_square_axes, x_label=None, y_label=None, num_terms_semiotic_square=None, get_tooltip_content=None, x_axis_values=None, y_axis_values=None, color_func=None, axis_scaler=scale_neg_1_to_1_with_zero_mean, **kwargs): """ Produces a semiotic square visualization. Parameters ---------- four_square : FourSquareAxes The basis of the visualization x_label : str The x-axis label in the scatter plot. Relationship between `category_a` and `category_b`. y_label The y-axis label in the scatter plot. Relationship neutral term and complex term. not_b_category_name: str or None Name of neutral set of data. Defaults to "Extra". num_terms_semiotic_square : int or None 10 by default. Number of terms to show in semiotic square. get_tooltip_content : str or None Defaults to tooltip showing z-scores on both axes. x_axis_values : list, default None Value-labels to show on x-axis. [-2.58, -1.96, 0, 1.96, 2.58] is the default y_axis_values : list, default None Value-labels to show on y-axis. [-2.58, -1.96, 0, 1.96, 2.58] is the default color_func : str, default None Javascript function to control color of a point. Function takes a parameter which is a dictionary entry produced by `ScatterChartExplorer.to_dict` and returns a string. Defaults to RdYlBl on x-axis, and varying saturation on y-axis. axis_scaler : lambda, default scale_neg_1_to_1_with_zero_mean_abs_max Scale values to fit axis Remaining arguments are from `produce_scattertext_explorer`. Returns ------- str, html of visualization """ if x_label is None: x_label = four_square_axes.left_category_name_ + '-' + four_square_axes.right_category_name_ # depends on [control=['if'], data=['x_label']] if y_label is None: y_label = four_square_axes.top_category_name_ + '-' + four_square_axes.bottom_category_name_ # depends on [control=['if'], data=['y_label']] if get_tooltip_content is None: get_tooltip_content = '(function(d) {return d.term + "<br/>%s: " + Math.round(d.ox*1000)/1000+"<br/>%s: " + Math.round(d.oy*1000)/1000})' % (x_label, y_label) # depends on [control=['if'], data=['get_tooltip_content']] if color_func is None: # this desaturates # color_func = '(function(d) {var c = d3.hsl(d3.interpolateRdYlBu(d.x)); c.s *= d.y; return c;})' color_func = '(function(d) {return d3.interpolateRdYlBu(d.x)})' # depends on [control=['if'], data=['color_func']] axes = four_square_axes.get_axes() if 'scores' not in kwargs: kwargs['scores'] = -axes['x'] # depends on [control=['if'], data=['kwargs']] '\n my_scaler = scale_neg_1_to_1_with_zero_mean_abs_max\n if foveate:\n my_scaler = scale_neg_1_to_1_with_zero_mean_rank_abs_max\n ' return produce_scattertext_explorer(four_square_axes.term_doc_matrix_, category=four_square_axes.left_categories_[0], category_name=four_square_axes.left_category_name_, not_categories=four_square_axes.right_categories_, not_category_name=four_square_axes.right_category_name_, neutral_categories=four_square_axes.top_categories_, neutral_category_name=four_square_axes.top_category_name_, extra_categories=four_square_axes.bottom_categories_, extra_category_name=four_square_axes.bottom_category_name_, sort_by_dist=False, x_coords=axis_scaler(-axes['x']), y_coords=axis_scaler(axes['y']), original_x=-axes['x'], original_y=axes['y'], show_characteristic=False, show_top_terms=False, x_label=x_label, y_label=y_label, semiotic_square=four_square_axes, show_neutral=True, show_extra=True, num_terms_semiotic_square=num_terms_semiotic_square, get_tooltip_content=get_tooltip_content, x_axis_values=x_axis_values, y_axis_values=y_axis_values, color_func=color_func, show_axes=False, **kwargs)
def is_not_from_subdomain(self, response, site_dict): """ Ensures the response's url isn't from a subdomain. :param obj response: The scrapy response :param dict site_dict: The site object from the JSON-File :return bool: Determines if the response's url is from a subdomain """ root_url = re.sub(re_url_root, '', site_dict["url"]) return UrlExtractor.get_allowed_domain(response.url) == root_url
def function[is_not_from_subdomain, parameter[self, response, site_dict]]: constant[ Ensures the response's url isn't from a subdomain. :param obj response: The scrapy response :param dict site_dict: The site object from the JSON-File :return bool: Determines if the response's url is from a subdomain ] variable[root_url] assign[=] call[name[re].sub, parameter[name[re_url_root], constant[], call[name[site_dict]][constant[url]]]] return[compare[call[name[UrlExtractor].get_allowed_domain, parameter[name[response].url]] equal[==] name[root_url]]]
keyword[def] identifier[is_not_from_subdomain] ( identifier[self] , identifier[response] , identifier[site_dict] ): literal[string] identifier[root_url] = identifier[re] . identifier[sub] ( identifier[re_url_root] , literal[string] , identifier[site_dict] [ literal[string] ]) keyword[return] identifier[UrlExtractor] . identifier[get_allowed_domain] ( identifier[response] . identifier[url] )== identifier[root_url]
def is_not_from_subdomain(self, response, site_dict): """ Ensures the response's url isn't from a subdomain. :param obj response: The scrapy response :param dict site_dict: The site object from the JSON-File :return bool: Determines if the response's url is from a subdomain """ root_url = re.sub(re_url_root, '', site_dict['url']) return UrlExtractor.get_allowed_domain(response.url) == root_url
def _serialize(self, value, attr, obj, **kwargs): """Return a string if `self.as_string=True`, otherwise return this field's `num_type`.""" ret = self._validated(value) return ( self._to_string(ret) if (self.as_string and ret not in (None, missing_)) else ret )
def function[_serialize, parameter[self, value, attr, obj]]: constant[Return a string if `self.as_string=True`, otherwise return this field's `num_type`.] variable[ret] assign[=] call[name[self]._validated, parameter[name[value]]] return[<ast.IfExp object at 0x7da18dc04b50>]
keyword[def] identifier[_serialize] ( identifier[self] , identifier[value] , identifier[attr] , identifier[obj] ,** identifier[kwargs] ): literal[string] identifier[ret] = identifier[self] . identifier[_validated] ( identifier[value] ) keyword[return] ( identifier[self] . identifier[_to_string] ( identifier[ret] ) keyword[if] ( identifier[self] . identifier[as_string] keyword[and] identifier[ret] keyword[not] keyword[in] ( keyword[None] , identifier[missing_] )) keyword[else] identifier[ret] )
def _serialize(self, value, attr, obj, **kwargs): """Return a string if `self.as_string=True`, otherwise return this field's `num_type`.""" ret = self._validated(value) return self._to_string(ret) if self.as_string and ret not in (None, missing_) else ret
def add(self, *tasks): """ Interfaces the GraphNode `add` method """ nodes = [x.node for x in tasks] self.node.add(*nodes) return self
def function[add, parameter[self]]: constant[ Interfaces the GraphNode `add` method ] variable[nodes] assign[=] <ast.ListComp object at 0x7da1b26af5b0> call[name[self].node.add, parameter[<ast.Starred object at 0x7da1b26ae770>]] return[name[self]]
keyword[def] identifier[add] ( identifier[self] ,* identifier[tasks] ): literal[string] identifier[nodes] =[ identifier[x] . identifier[node] keyword[for] identifier[x] keyword[in] identifier[tasks] ] identifier[self] . identifier[node] . identifier[add] (* identifier[nodes] ) keyword[return] identifier[self]
def add(self, *tasks): """ Interfaces the GraphNode `add` method """ nodes = [x.node for x in tasks] self.node.add(*nodes) return self
def data_lookup_method(fields_list, mongo_db_obj, hist, record, lookup_type): """ Method to lookup the replacement value given a single input value from the same field. :param dict fields_list: Fields configurations :param MongoClient mongo_db_obj: MongoDB collection object :param dict hist: existing input of history values object :param dict record: values to validate :param str lookup_type: Type of lookup """ if hist is None: hist = {} for field in record: if record[field] != '' and record[field] is not None: if field in fields_list: if lookup_type in fields_list[field]['lookup']: field_val_new, hist = DataLookup( fieldVal=record[field], db=mongo_db_obj, lookupType=lookup_type, fieldName=field, histObj=hist) record[field] = field_val_new return record, hist
def function[data_lookup_method, parameter[fields_list, mongo_db_obj, hist, record, lookup_type]]: constant[ Method to lookup the replacement value given a single input value from the same field. :param dict fields_list: Fields configurations :param MongoClient mongo_db_obj: MongoDB collection object :param dict hist: existing input of history values object :param dict record: values to validate :param str lookup_type: Type of lookup ] if compare[name[hist] is constant[None]] begin[:] variable[hist] assign[=] dictionary[[], []] for taget[name[field]] in starred[name[record]] begin[:] if <ast.BoolOp object at 0x7da20c6c5b70> begin[:] if compare[name[field] in name[fields_list]] begin[:] if compare[name[lookup_type] in call[call[name[fields_list]][name[field]]][constant[lookup]]] begin[:] <ast.Tuple object at 0x7da18f7213c0> assign[=] call[name[DataLookup], parameter[]] call[name[record]][name[field]] assign[=] name[field_val_new] return[tuple[[<ast.Name object at 0x7da207f03820>, <ast.Name object at 0x7da207f03550>]]]
keyword[def] identifier[data_lookup_method] ( identifier[fields_list] , identifier[mongo_db_obj] , identifier[hist] , identifier[record] , identifier[lookup_type] ): literal[string] keyword[if] identifier[hist] keyword[is] keyword[None] : identifier[hist] ={} keyword[for] identifier[field] keyword[in] identifier[record] : keyword[if] identifier[record] [ identifier[field] ]!= literal[string] keyword[and] identifier[record] [ identifier[field] ] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[field] keyword[in] identifier[fields_list] : keyword[if] identifier[lookup_type] keyword[in] identifier[fields_list] [ identifier[field] ][ literal[string] ]: identifier[field_val_new] , identifier[hist] = identifier[DataLookup] ( identifier[fieldVal] = identifier[record] [ identifier[field] ], identifier[db] = identifier[mongo_db_obj] , identifier[lookupType] = identifier[lookup_type] , identifier[fieldName] = identifier[field] , identifier[histObj] = identifier[hist] ) identifier[record] [ identifier[field] ]= identifier[field_val_new] keyword[return] identifier[record] , identifier[hist]
def data_lookup_method(fields_list, mongo_db_obj, hist, record, lookup_type): """ Method to lookup the replacement value given a single input value from the same field. :param dict fields_list: Fields configurations :param MongoClient mongo_db_obj: MongoDB collection object :param dict hist: existing input of history values object :param dict record: values to validate :param str lookup_type: Type of lookup """ if hist is None: hist = {} # depends on [control=['if'], data=['hist']] for field in record: if record[field] != '' and record[field] is not None: if field in fields_list: if lookup_type in fields_list[field]['lookup']: (field_val_new, hist) = DataLookup(fieldVal=record[field], db=mongo_db_obj, lookupType=lookup_type, fieldName=field, histObj=hist) record[field] = field_val_new # depends on [control=['if'], data=['lookup_type']] # depends on [control=['if'], data=['field', 'fields_list']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']] return (record, hist)
def first(self, timeout=None): """ Wait for the first successful result to become available :param timeout: Wait timeout, sec :type timeout: float|int|None :return: result, or None if all threads have failed :rtype: * """ while True: with self._jobfinished: if self._results or not self._jobs.unfinished_tasks: break self._jobfinished.wait(timeout) return self._results[0] if self._results else None
def function[first, parameter[self, timeout]]: constant[ Wait for the first successful result to become available :param timeout: Wait timeout, sec :type timeout: float|int|None :return: result, or None if all threads have failed :rtype: * ] while constant[True] begin[:] with name[self]._jobfinished begin[:] if <ast.BoolOp object at 0x7da20e9b12a0> begin[:] break call[name[self]._jobfinished.wait, parameter[name[timeout]]] return[<ast.IfExp object at 0x7da20e9b2fb0>]
keyword[def] identifier[first] ( identifier[self] , identifier[timeout] = keyword[None] ): literal[string] keyword[while] keyword[True] : keyword[with] identifier[self] . identifier[_jobfinished] : keyword[if] identifier[self] . identifier[_results] keyword[or] keyword[not] identifier[self] . identifier[_jobs] . identifier[unfinished_tasks] : keyword[break] identifier[self] . identifier[_jobfinished] . identifier[wait] ( identifier[timeout] ) keyword[return] identifier[self] . identifier[_results] [ literal[int] ] keyword[if] identifier[self] . identifier[_results] keyword[else] keyword[None]
def first(self, timeout=None): """ Wait for the first successful result to become available :param timeout: Wait timeout, sec :type timeout: float|int|None :return: result, or None if all threads have failed :rtype: * """ while True: with self._jobfinished: if self._results or not self._jobs.unfinished_tasks: break # depends on [control=['if'], data=[]] self._jobfinished.wait(timeout) # depends on [control=['with'], data=[]] # depends on [control=['while'], data=[]] return self._results[0] if self._results else None
def iter_all_dict_combinations_ordered(varied_dict): """ Same as all_dict_combinations but preserves order """ tups_list = [[(key, val) for val in val_list] for (key, val_list) in six.iteritems(varied_dict)] dict_iter = (OrderedDict(tups) for tups in it.product(*tups_list)) return dict_iter
def function[iter_all_dict_combinations_ordered, parameter[varied_dict]]: constant[ Same as all_dict_combinations but preserves order ] variable[tups_list] assign[=] <ast.ListComp object at 0x7da1b246b670> variable[dict_iter] assign[=] <ast.GeneratorExp object at 0x7da1b24e2a70> return[name[dict_iter]]
keyword[def] identifier[iter_all_dict_combinations_ordered] ( identifier[varied_dict] ): literal[string] identifier[tups_list] =[[( identifier[key] , identifier[val] ) keyword[for] identifier[val] keyword[in] identifier[val_list] ] keyword[for] ( identifier[key] , identifier[val_list] ) keyword[in] identifier[six] . identifier[iteritems] ( identifier[varied_dict] )] identifier[dict_iter] =( identifier[OrderedDict] ( identifier[tups] ) keyword[for] identifier[tups] keyword[in] identifier[it] . identifier[product] (* identifier[tups_list] )) keyword[return] identifier[dict_iter]
def iter_all_dict_combinations_ordered(varied_dict): """ Same as all_dict_combinations but preserves order """ tups_list = [[(key, val) for val in val_list] for (key, val_list) in six.iteritems(varied_dict)] dict_iter = (OrderedDict(tups) for tups in it.product(*tups_list)) return dict_iter
def load_images(self, search_file, source_file): """加载待匹配图片.""" self.search_file, self.source_file = search_file, source_file self.im_search, self.im_source = imread(self.search_file), imread(self.source_file) # 初始化对象 self.check_macthing_object = CheckKeypointResult(self.im_search, self.im_source)
def function[load_images, parameter[self, search_file, source_file]]: constant[加载待匹配图片.] <ast.Tuple object at 0x7da18dc98df0> assign[=] tuple[[<ast.Name object at 0x7da18dc99120>, <ast.Name object at 0x7da18dc9bb80>]] <ast.Tuple object at 0x7da18dc9ad10> assign[=] tuple[[<ast.Call object at 0x7da18dc99420>, <ast.Call object at 0x7da18dc9b7f0>]] name[self].check_macthing_object assign[=] call[name[CheckKeypointResult], parameter[name[self].im_search, name[self].im_source]]
keyword[def] identifier[load_images] ( identifier[self] , identifier[search_file] , identifier[source_file] ): literal[string] identifier[self] . identifier[search_file] , identifier[self] . identifier[source_file] = identifier[search_file] , identifier[source_file] identifier[self] . identifier[im_search] , identifier[self] . identifier[im_source] = identifier[imread] ( identifier[self] . identifier[search_file] ), identifier[imread] ( identifier[self] . identifier[source_file] ) identifier[self] . identifier[check_macthing_object] = identifier[CheckKeypointResult] ( identifier[self] . identifier[im_search] , identifier[self] . identifier[im_source] )
def load_images(self, search_file, source_file): """加载待匹配图片.""" (self.search_file, self.source_file) = (search_file, source_file) (self.im_search, self.im_source) = (imread(self.search_file), imread(self.source_file)) # 初始化对象 self.check_macthing_object = CheckKeypointResult(self.im_search, self.im_source)
def close(self): """Cleanup batch job.""" self.groups_shelf.close() self.indicators_shelf.close() if self.debug and self.enable_saved_file: fqfn = os.path.join(self.tcex.args.tc_temp_path, 'xids-saved') if os.path.isfile(fqfn): os.remove(fqfn) # remove previous file to prevent duplicates with open(fqfn, 'w') as fh: for xid in self.saved_xids: fh.write('{}\n'.format(xid)) else: # delete saved files if os.path.isfile(self.group_shelf_fqfn): os.remove(self.group_shelf_fqfn) if os.path.isfile(self.group_shelf_fqfn): os.remove(self.indicator_shelf_fqfn)
def function[close, parameter[self]]: constant[Cleanup batch job.] call[name[self].groups_shelf.close, parameter[]] call[name[self].indicators_shelf.close, parameter[]] if <ast.BoolOp object at 0x7da18fe92d40> begin[:] variable[fqfn] assign[=] call[name[os].path.join, parameter[name[self].tcex.args.tc_temp_path, constant[xids-saved]]] if call[name[os].path.isfile, parameter[name[fqfn]]] begin[:] call[name[os].remove, parameter[name[fqfn]]] with call[name[open], parameter[name[fqfn], constant[w]]] begin[:] for taget[name[xid]] in starred[name[self].saved_xids] begin[:] call[name[fh].write, parameter[call[constant[{} ].format, parameter[name[xid]]]]]
keyword[def] identifier[close] ( identifier[self] ): literal[string] identifier[self] . identifier[groups_shelf] . identifier[close] () identifier[self] . identifier[indicators_shelf] . identifier[close] () keyword[if] identifier[self] . identifier[debug] keyword[and] identifier[self] . identifier[enable_saved_file] : identifier[fqfn] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[tcex] . identifier[args] . identifier[tc_temp_path] , literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fqfn] ): identifier[os] . identifier[remove] ( identifier[fqfn] ) keyword[with] identifier[open] ( identifier[fqfn] , literal[string] ) keyword[as] identifier[fh] : keyword[for] identifier[xid] keyword[in] identifier[self] . identifier[saved_xids] : identifier[fh] . identifier[write] ( literal[string] . identifier[format] ( identifier[xid] )) keyword[else] : keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[group_shelf_fqfn] ): identifier[os] . identifier[remove] ( identifier[self] . identifier[group_shelf_fqfn] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[group_shelf_fqfn] ): identifier[os] . identifier[remove] ( identifier[self] . identifier[indicator_shelf_fqfn] )
def close(self): """Cleanup batch job.""" self.groups_shelf.close() self.indicators_shelf.close() if self.debug and self.enable_saved_file: fqfn = os.path.join(self.tcex.args.tc_temp_path, 'xids-saved') if os.path.isfile(fqfn): os.remove(fqfn) # remove previous file to prevent duplicates # depends on [control=['if'], data=[]] with open(fqfn, 'w') as fh: for xid in self.saved_xids: fh.write('{}\n'.format(xid)) # depends on [control=['for'], data=['xid']] # depends on [control=['with'], data=['fh']] # depends on [control=['if'], data=[]] else: # delete saved files if os.path.isfile(self.group_shelf_fqfn): os.remove(self.group_shelf_fqfn) # depends on [control=['if'], data=[]] if os.path.isfile(self.group_shelf_fqfn): os.remove(self.indicator_shelf_fqfn) # depends on [control=['if'], data=[]]
def get_skyline(lrh): """ Wortst Time Complexity: O(NlogN) :type buildings: List[List[int]] :rtype: List[List[int]] """ skyline, live = [], [] i, n = 0, len(lrh) while i < n or live: if not live or i < n and lrh[i][0] <= -live[0][1]: x = lrh[i][0] while i < n and lrh[i][0] == x: heapq.heappush(live, (-lrh[i][2], -lrh[i][1])) i += 1 else: x = -live[0][1] while live and -live[0][1] <= x: heapq.heappop(live) height = len(live) and -live[0][0] if not skyline or height != skyline[-1][1]: skyline += [x, height], return skyline
def function[get_skyline, parameter[lrh]]: constant[ Wortst Time Complexity: O(NlogN) :type buildings: List[List[int]] :rtype: List[List[int]] ] <ast.Tuple object at 0x7da1b209b1c0> assign[=] tuple[[<ast.List object at 0x7da1b2098a60>, <ast.List object at 0x7da1b2098df0>]] <ast.Tuple object at 0x7da1b209b400> assign[=] tuple[[<ast.Constant object at 0x7da1b209b010>, <ast.Call object at 0x7da1b2099630>]] while <ast.BoolOp object at 0x7da1b2098e50> begin[:] if <ast.BoolOp object at 0x7da1b2098910> begin[:] variable[x] assign[=] call[call[name[lrh]][name[i]]][constant[0]] while <ast.BoolOp object at 0x7da1b201fbe0> begin[:] call[name[heapq].heappush, parameter[name[live], tuple[[<ast.UnaryOp object at 0x7da1b201ca90>, <ast.UnaryOp object at 0x7da1b201c430>]]]] <ast.AugAssign object at 0x7da1b201ff70> variable[height] assign[=] <ast.BoolOp object at 0x7da1b201c280> if <ast.BoolOp object at 0x7da1b201cc10> begin[:] <ast.AugAssign object at 0x7da1b20e43d0> return[name[skyline]]
keyword[def] identifier[get_skyline] ( identifier[lrh] ): literal[string] identifier[skyline] , identifier[live] =[],[] identifier[i] , identifier[n] = literal[int] , identifier[len] ( identifier[lrh] ) keyword[while] identifier[i] < identifier[n] keyword[or] identifier[live] : keyword[if] keyword[not] identifier[live] keyword[or] identifier[i] < identifier[n] keyword[and] identifier[lrh] [ identifier[i] ][ literal[int] ]<=- identifier[live] [ literal[int] ][ literal[int] ]: identifier[x] = identifier[lrh] [ identifier[i] ][ literal[int] ] keyword[while] identifier[i] < identifier[n] keyword[and] identifier[lrh] [ identifier[i] ][ literal[int] ]== identifier[x] : identifier[heapq] . identifier[heappush] ( identifier[live] ,(- identifier[lrh] [ identifier[i] ][ literal[int] ],- identifier[lrh] [ identifier[i] ][ literal[int] ])) identifier[i] += literal[int] keyword[else] : identifier[x] =- identifier[live] [ literal[int] ][ literal[int] ] keyword[while] identifier[live] keyword[and] - identifier[live] [ literal[int] ][ literal[int] ]<= identifier[x] : identifier[heapq] . identifier[heappop] ( identifier[live] ) identifier[height] = identifier[len] ( identifier[live] ) keyword[and] - identifier[live] [ literal[int] ][ literal[int] ] keyword[if] keyword[not] identifier[skyline] keyword[or] identifier[height] != identifier[skyline] [- literal[int] ][ literal[int] ]: identifier[skyline] +=[ identifier[x] , identifier[height] ], keyword[return] identifier[skyline]
def get_skyline(lrh): """ Wortst Time Complexity: O(NlogN) :type buildings: List[List[int]] :rtype: List[List[int]] """ (skyline, live) = ([], []) (i, n) = (0, len(lrh)) while i < n or live: if not live or (i < n and lrh[i][0] <= -live[0][1]): x = lrh[i][0] while i < n and lrh[i][0] == x: heapq.heappush(live, (-lrh[i][2], -lrh[i][1])) i += 1 # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] else: x = -live[0][1] while live and -live[0][1] <= x: heapq.heappop(live) # depends on [control=['while'], data=[]] height = len(live) and -live[0][0] if not skyline or height != skyline[-1][1]: skyline += ([x, height],) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return skyline
def _parse_field_text(field, conf, creds): """ Return form field value from 3 options: 1. If value key is present, render it as a Jinja2 template. The template has access to ``conf`` and ``creds`` dictionaries. 2. If creds key is present, use it as a dot-delimited path in creds dictionary (e.g. ``account.login``). 3. Otherwise return None """ value = field.get('value') if value: template = Template(value) return template.render( conf=conf, creds=creds, ) creds_path = field.get('creds') if creds_path: bread_crumbs = creds_path.split('.') node = creds for crumb in bread_crumbs: node = node[crumb] return node return None
def function[_parse_field_text, parameter[field, conf, creds]]: constant[ Return form field value from 3 options: 1. If value key is present, render it as a Jinja2 template. The template has access to ``conf`` and ``creds`` dictionaries. 2. If creds key is present, use it as a dot-delimited path in creds dictionary (e.g. ``account.login``). 3. Otherwise return None ] variable[value] assign[=] call[name[field].get, parameter[constant[value]]] if name[value] begin[:] variable[template] assign[=] call[name[Template], parameter[name[value]]] return[call[name[template].render, parameter[]]] variable[creds_path] assign[=] call[name[field].get, parameter[constant[creds]]] if name[creds_path] begin[:] variable[bread_crumbs] assign[=] call[name[creds_path].split, parameter[constant[.]]] variable[node] assign[=] name[creds] for taget[name[crumb]] in starred[name[bread_crumbs]] begin[:] variable[node] assign[=] call[name[node]][name[crumb]] return[name[node]] return[constant[None]]
keyword[def] identifier[_parse_field_text] ( identifier[field] , identifier[conf] , identifier[creds] ): literal[string] identifier[value] = identifier[field] . identifier[get] ( literal[string] ) keyword[if] identifier[value] : identifier[template] = identifier[Template] ( identifier[value] ) keyword[return] identifier[template] . identifier[render] ( identifier[conf] = identifier[conf] , identifier[creds] = identifier[creds] , ) identifier[creds_path] = identifier[field] . identifier[get] ( literal[string] ) keyword[if] identifier[creds_path] : identifier[bread_crumbs] = identifier[creds_path] . identifier[split] ( literal[string] ) identifier[node] = identifier[creds] keyword[for] identifier[crumb] keyword[in] identifier[bread_crumbs] : identifier[node] = identifier[node] [ identifier[crumb] ] keyword[return] identifier[node] keyword[return] keyword[None]
def _parse_field_text(field, conf, creds): """ Return form field value from 3 options: 1. If value key is present, render it as a Jinja2 template. The template has access to ``conf`` and ``creds`` dictionaries. 2. If creds key is present, use it as a dot-delimited path in creds dictionary (e.g. ``account.login``). 3. Otherwise return None """ value = field.get('value') if value: template = Template(value) return template.render(conf=conf, creds=creds) # depends on [control=['if'], data=[]] creds_path = field.get('creds') if creds_path: bread_crumbs = creds_path.split('.') node = creds for crumb in bread_crumbs: node = node[crumb] # depends on [control=['for'], data=['crumb']] return node # depends on [control=['if'], data=[]] return None
def _set_routing_profiletype(self, v, load=False): """ Setter method for routing_profiletype, mapped from YANG variable /rbridge_id/hardware_profile/route_table/predefined/routing_profiletype (routing-profile-subtype) If this variable is read-only (config: false) in the source YANG file, then _set_routing_profiletype is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_routing_profiletype() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ipv4-min-v6': {'value': 3}, u'ipv6-max-route': {'value': 4}, u'default': {'value': 0}, u'ipv4-max-arp': {'value': 2}, u'ipv4-max-route': {'value': 1}, u'ipv6-max-nd': {'value': 5}},), is_leaf=True, yang_name="routing_profiletype", rest_name="routing_profiletype", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='routing-profile-subtype', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """routing_profiletype must be of a type compatible with routing-profile-subtype""", 'defined-type': "brocade-hardware:routing-profile-subtype", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ipv4-min-v6': {'value': 3}, u'ipv6-max-route': {'value': 4}, u'default': {'value': 0}, u'ipv4-max-arp': {'value': 2}, u'ipv4-max-route': {'value': 1}, u'ipv6-max-nd': {'value': 5}},), is_leaf=True, yang_name="routing_profiletype", rest_name="routing_profiletype", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='routing-profile-subtype', is_config=True)""", }) self.__routing_profiletype = t if hasattr(self, '_set'): self._set()
def function[_set_routing_profiletype, parameter[self, v, load]]: constant[ Setter method for routing_profiletype, mapped from YANG variable /rbridge_id/hardware_profile/route_table/predefined/routing_profiletype (routing-profile-subtype) If this variable is read-only (config: false) in the source YANG file, then _set_routing_profiletype is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_routing_profiletype() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da1b2596aa0> name[self].__routing_profiletype assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_routing_profiletype] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_type] = literal[string] , identifier[restriction_arg] ={ literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }, literal[string] :{ literal[string] : literal[int] }},), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : keyword[None] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__routing_profiletype] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_routing_profiletype(self, v, load=False): """ Setter method for routing_profiletype, mapped from YANG variable /rbridge_id/hardware_profile/route_table/predefined/routing_profiletype (routing-profile-subtype) If this variable is read-only (config: false) in the source YANG file, then _set_routing_profiletype is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_routing_profiletype() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type='dict_key', restriction_arg={u'ipv4-min-v6': {'value': 3}, u'ipv6-max-route': {'value': 4}, u'default': {'value': 0}, u'ipv4-max-arp': {'value': 2}, u'ipv4-max-route': {'value': 1}, u'ipv6-max-nd': {'value': 5}}), is_leaf=True, yang_name='routing_profiletype', rest_name='routing_profiletype', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-hardware', defining_module='brocade-hardware', yang_type='routing-profile-subtype', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'routing_profiletype must be of a type compatible with routing-profile-subtype', 'defined-type': 'brocade-hardware:routing-profile-subtype', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u\'ipv4-min-v6\': {\'value\': 3}, u\'ipv6-max-route\': {\'value\': 4}, u\'default\': {\'value\': 0}, u\'ipv4-max-arp\': {\'value\': 2}, u\'ipv4-max-route\': {\'value\': 1}, u\'ipv6-max-nd\': {\'value\': 5}},), is_leaf=True, yang_name="routing_profiletype", rest_name="routing_profiletype", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'cli-drop-node-name\': None, u\'cli-suppress-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-hardware\', defining_module=\'brocade-hardware\', yang_type=\'routing-profile-subtype\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__routing_profiletype = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def parse_value_reference(self, tup_tree): """ Parse a VALUE.REFERENCE element and return the instance path or class path it represents as a CIMInstanceName or CIMClassName object, respectively. :: <!ELEMENT VALUE.REFERENCE (CLASSPATH | LOCALCLASSPATH | CLASSNAME | INSTANCEPATH | LOCALINSTANCEPATH | INSTANCENAME)> """ self.check_node(tup_tree, 'VALUE.REFERENCE') child = self.one_child(tup_tree, ('CLASSPATH', 'LOCALCLASSPATH', 'CLASSNAME', 'INSTANCEPATH', 'LOCALINSTANCEPATH', 'INSTANCENAME')) return child
def function[parse_value_reference, parameter[self, tup_tree]]: constant[ Parse a VALUE.REFERENCE element and return the instance path or class path it represents as a CIMInstanceName or CIMClassName object, respectively. :: <!ELEMENT VALUE.REFERENCE (CLASSPATH | LOCALCLASSPATH | CLASSNAME | INSTANCEPATH | LOCALINSTANCEPATH | INSTANCENAME)> ] call[name[self].check_node, parameter[name[tup_tree], constant[VALUE.REFERENCE]]] variable[child] assign[=] call[name[self].one_child, parameter[name[tup_tree], tuple[[<ast.Constant object at 0x7da18bccb9a0>, <ast.Constant object at 0x7da18bcc9f00>, <ast.Constant object at 0x7da18bccb5e0>, <ast.Constant object at 0x7da18bcc8be0>, <ast.Constant object at 0x7da18bccad40>, <ast.Constant object at 0x7da18bccb880>]]]] return[name[child]]
keyword[def] identifier[parse_value_reference] ( identifier[self] , identifier[tup_tree] ): literal[string] identifier[self] . identifier[check_node] ( identifier[tup_tree] , literal[string] ) identifier[child] = identifier[self] . identifier[one_child] ( identifier[tup_tree] , ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )) keyword[return] identifier[child]
def parse_value_reference(self, tup_tree): """ Parse a VALUE.REFERENCE element and return the instance path or class path it represents as a CIMInstanceName or CIMClassName object, respectively. :: <!ELEMENT VALUE.REFERENCE (CLASSPATH | LOCALCLASSPATH | CLASSNAME | INSTANCEPATH | LOCALINSTANCEPATH | INSTANCENAME)> """ self.check_node(tup_tree, 'VALUE.REFERENCE') child = self.one_child(tup_tree, ('CLASSPATH', 'LOCALCLASSPATH', 'CLASSNAME', 'INSTANCEPATH', 'LOCALINSTANCEPATH', 'INSTANCENAME')) return child
def write_hdf5_series(series, output, path=None, attrs=None, **kwargs): """Write a Series to HDF5. See :func:`write_hdf5_array` for details of arguments and keywords. """ if attrs is None: attrs = format_index_array_attrs(series) return write_hdf5_array(series, output, path=path, attrs=attrs, **kwargs)
def function[write_hdf5_series, parameter[series, output, path, attrs]]: constant[Write a Series to HDF5. See :func:`write_hdf5_array` for details of arguments and keywords. ] if compare[name[attrs] is constant[None]] begin[:] variable[attrs] assign[=] call[name[format_index_array_attrs], parameter[name[series]]] return[call[name[write_hdf5_array], parameter[name[series], name[output]]]]
keyword[def] identifier[write_hdf5_series] ( identifier[series] , identifier[output] , identifier[path] = keyword[None] , identifier[attrs] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[attrs] keyword[is] keyword[None] : identifier[attrs] = identifier[format_index_array_attrs] ( identifier[series] ) keyword[return] identifier[write_hdf5_array] ( identifier[series] , identifier[output] , identifier[path] = identifier[path] , identifier[attrs] = identifier[attrs] ,** identifier[kwargs] )
def write_hdf5_series(series, output, path=None, attrs=None, **kwargs): """Write a Series to HDF5. See :func:`write_hdf5_array` for details of arguments and keywords. """ if attrs is None: attrs = format_index_array_attrs(series) # depends on [control=['if'], data=['attrs']] return write_hdf5_array(series, output, path=path, attrs=attrs, **kwargs)
def get_img_attrs(self, style=None, **kwargs): """ Get an attribute list (src, srcset, style, et al) for the image. style -- an optional list of CSS style fragments Returns: a dict of attributes e.g. {'src':'foo.jpg','srcset':'foo.jpg 1x, bar.jpg 2x'] """ add = {} if 'prefix' in kwargs: attr_prefixes = kwargs.get('prefix') if isinstance(kwargs['prefix'], str): attr_prefixes = [attr_prefixes] for prefix in attr_prefixes: for k, val in kwargs.items(): if k.startswith(prefix): add[k[len(prefix):]] = val return self._get_img_attrs(style, {**kwargs, **add})
def function[get_img_attrs, parameter[self, style]]: constant[ Get an attribute list (src, srcset, style, et al) for the image. style -- an optional list of CSS style fragments Returns: a dict of attributes e.g. {'src':'foo.jpg','srcset':'foo.jpg 1x, bar.jpg 2x'] ] variable[add] assign[=] dictionary[[], []] if compare[constant[prefix] in name[kwargs]] begin[:] variable[attr_prefixes] assign[=] call[name[kwargs].get, parameter[constant[prefix]]] if call[name[isinstance], parameter[call[name[kwargs]][constant[prefix]], name[str]]] begin[:] variable[attr_prefixes] assign[=] list[[<ast.Name object at 0x7da1b26adf30>]] for taget[name[prefix]] in starred[name[attr_prefixes]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b26ad7b0>, <ast.Name object at 0x7da1b26ac250>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:] if call[name[k].startswith, parameter[name[prefix]]] begin[:] call[name[add]][call[name[k]][<ast.Slice object at 0x7da1b26afdc0>]] assign[=] name[val] return[call[name[self]._get_img_attrs, parameter[name[style], dictionary[[None, None], [<ast.Name object at 0x7da1b26aefe0>, <ast.Name object at 0x7da1b26af670>]]]]]
keyword[def] identifier[get_img_attrs] ( identifier[self] , identifier[style] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[add] ={} keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[attr_prefixes] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[kwargs] [ literal[string] ], identifier[str] ): identifier[attr_prefixes] =[ identifier[attr_prefixes] ] keyword[for] identifier[prefix] keyword[in] identifier[attr_prefixes] : keyword[for] identifier[k] , identifier[val] keyword[in] identifier[kwargs] . identifier[items] (): keyword[if] identifier[k] . identifier[startswith] ( identifier[prefix] ): identifier[add] [ identifier[k] [ identifier[len] ( identifier[prefix] ):]]= identifier[val] keyword[return] identifier[self] . identifier[_get_img_attrs] ( identifier[style] ,{** identifier[kwargs] ,** identifier[add] })
def get_img_attrs(self, style=None, **kwargs): """ Get an attribute list (src, srcset, style, et al) for the image. style -- an optional list of CSS style fragments Returns: a dict of attributes e.g. {'src':'foo.jpg','srcset':'foo.jpg 1x, bar.jpg 2x'] """ add = {} if 'prefix' in kwargs: attr_prefixes = kwargs.get('prefix') if isinstance(kwargs['prefix'], str): attr_prefixes = [attr_prefixes] # depends on [control=['if'], data=[]] for prefix in attr_prefixes: for (k, val) in kwargs.items(): if k.startswith(prefix): add[k[len(prefix):]] = val # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['prefix']] # depends on [control=['if'], data=['kwargs']] return self._get_img_attrs(style, {**kwargs, **add})
def _on_completion(self): """ Checks to see if the Job has completed, and cleans up if it has. """ logger.debug('Job {0} running _on_completion check'.format(self.name)) if self.state.status != 'running' or (not self._is_complete()): return for job, results in self.run_log['tasks'].iteritems(): if results.get('success', False) == False: self._set_status('failed') try: self.backend.acquire_lock() if self.event_handler: self.event_handler.emit('job_failed', self._serialize(include_run_logs=True)) except: logger.exception("Error in handling events.") finally: self.backend.release_lock() break if self.state.status != 'failed': self._set_status('waiting') self.run_log = {} try: self.backend.acquire_lock() if self.event_handler: self.event_handler.emit('job_complete', self._serialize(include_run_logs=True)) except: logger.exception("Error in handling events.") finally: self.backend.release_lock() self.destroy_snapshot()
def function[_on_completion, parameter[self]]: constant[ Checks to see if the Job has completed, and cleans up if it has. ] call[name[logger].debug, parameter[call[constant[Job {0} running _on_completion check].format, parameter[name[self].name]]]] if <ast.BoolOp object at 0x7da1b0ba1ae0> begin[:] return[None] for taget[tuple[[<ast.Name object at 0x7da1b0ba33a0>, <ast.Name object at 0x7da1b0ba3370>]]] in starred[call[call[name[self].run_log][constant[tasks]].iteritems, parameter[]]] begin[:] if compare[call[name[results].get, parameter[constant[success], constant[False]]] equal[==] constant[False]] begin[:] call[name[self]._set_status, parameter[constant[failed]]] <ast.Try object at 0x7da1b0bdb040> break if compare[name[self].state.status not_equal[!=] constant[failed]] begin[:] call[name[self]._set_status, parameter[constant[waiting]]] name[self].run_log assign[=] dictionary[[], []] <ast.Try object at 0x7da1b0bd9f60> call[name[self].destroy_snapshot, parameter[]]
keyword[def] identifier[_on_completion] ( identifier[self] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[name] )) keyword[if] identifier[self] . identifier[state] . identifier[status] != literal[string] keyword[or] ( keyword[not] identifier[self] . identifier[_is_complete] ()): keyword[return] keyword[for] identifier[job] , identifier[results] keyword[in] identifier[self] . identifier[run_log] [ literal[string] ]. identifier[iteritems] (): keyword[if] identifier[results] . identifier[get] ( literal[string] , keyword[False] )== keyword[False] : identifier[self] . identifier[_set_status] ( literal[string] ) keyword[try] : identifier[self] . identifier[backend] . identifier[acquire_lock] () keyword[if] identifier[self] . identifier[event_handler] : identifier[self] . identifier[event_handler] . identifier[emit] ( literal[string] , identifier[self] . identifier[_serialize] ( identifier[include_run_logs] = keyword[True] )) keyword[except] : identifier[logger] . identifier[exception] ( literal[string] ) keyword[finally] : identifier[self] . identifier[backend] . identifier[release_lock] () keyword[break] keyword[if] identifier[self] . identifier[state] . identifier[status] != literal[string] : identifier[self] . identifier[_set_status] ( literal[string] ) identifier[self] . identifier[run_log] ={} keyword[try] : identifier[self] . identifier[backend] . identifier[acquire_lock] () keyword[if] identifier[self] . identifier[event_handler] : identifier[self] . identifier[event_handler] . identifier[emit] ( literal[string] , identifier[self] . identifier[_serialize] ( identifier[include_run_logs] = keyword[True] )) keyword[except] : identifier[logger] . identifier[exception] ( literal[string] ) keyword[finally] : identifier[self] . identifier[backend] . identifier[release_lock] () identifier[self] . identifier[destroy_snapshot] ()
def _on_completion(self): """ Checks to see if the Job has completed, and cleans up if it has. """ logger.debug('Job {0} running _on_completion check'.format(self.name)) if self.state.status != 'running' or not self._is_complete(): return # depends on [control=['if'], data=[]] for (job, results) in self.run_log['tasks'].iteritems(): if results.get('success', False) == False: self._set_status('failed') try: self.backend.acquire_lock() if self.event_handler: self.event_handler.emit('job_failed', self._serialize(include_run_logs=True)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except: logger.exception('Error in handling events.') # depends on [control=['except'], data=[]] finally: self.backend.release_lock() break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if self.state.status != 'failed': self._set_status('waiting') self.run_log = {} try: self.backend.acquire_lock() if self.event_handler: self.event_handler.emit('job_complete', self._serialize(include_run_logs=True)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except: logger.exception('Error in handling events.') # depends on [control=['except'], data=[]] finally: self.backend.release_lock() # depends on [control=['if'], data=[]] self.destroy_snapshot()
def switch_on(self): """Turn the switch on.""" success = self.set_status(CONST.STATUS_OPEN_INT) if success: self._json_state['status'] = CONST.STATUS_OPEN return success
def function[switch_on, parameter[self]]: constant[Turn the switch on.] variable[success] assign[=] call[name[self].set_status, parameter[name[CONST].STATUS_OPEN_INT]] if name[success] begin[:] call[name[self]._json_state][constant[status]] assign[=] name[CONST].STATUS_OPEN return[name[success]]
keyword[def] identifier[switch_on] ( identifier[self] ): literal[string] identifier[success] = identifier[self] . identifier[set_status] ( identifier[CONST] . identifier[STATUS_OPEN_INT] ) keyword[if] identifier[success] : identifier[self] . identifier[_json_state] [ literal[string] ]= identifier[CONST] . identifier[STATUS_OPEN] keyword[return] identifier[success]
def switch_on(self): """Turn the switch on.""" success = self.set_status(CONST.STATUS_OPEN_INT) if success: self._json_state['status'] = CONST.STATUS_OPEN # depends on [control=['if'], data=[]] return success
def cap(self, subcommand, *args): """ Send a CAP command according to `the spec <http://ircv3.atheme.org/specification/capability-negotiation-3.1>`_. Arguments: subcommand -- LS, LIST, REQ, ACK, CLEAR, END args -- capabilities, if required for given subcommand Example: .cap('LS') .cap('REQ', 'multi-prefix', 'sasl') .cap('END') """ cap_subcommands = set('LS LIST REQ ACK NAK CLEAR END'.split()) client_subcommands = set(cap_subcommands) - {'NAK'} assert subcommand in client_subcommands, "invalid subcommand" def _multi_parameter(args): """ According to the spec:: If more than one capability is named, the RFC1459 designated sentinel (:) for a multi-parameter argument must be present. It's not obvious where the sentinel should be present or if it must be omitted for a single parameter, so follow convention and only include the sentinel prefixed to the first parameter if more than one parameter is present. """ if len(args) > 1: return (':' + args[0],) + args[1:] return args self.send_items('CAP', subcommand, *_multi_parameter(args))
def function[cap, parameter[self, subcommand]]: constant[ Send a CAP command according to `the spec <http://ircv3.atheme.org/specification/capability-negotiation-3.1>`_. Arguments: subcommand -- LS, LIST, REQ, ACK, CLEAR, END args -- capabilities, if required for given subcommand Example: .cap('LS') .cap('REQ', 'multi-prefix', 'sasl') .cap('END') ] variable[cap_subcommands] assign[=] call[name[set], parameter[call[constant[LS LIST REQ ACK NAK CLEAR END].split, parameter[]]]] variable[client_subcommands] assign[=] binary_operation[call[name[set], parameter[name[cap_subcommands]]] - <ast.Set object at 0x7da2049606d0>] assert[compare[name[subcommand] in name[client_subcommands]]] def function[_multi_parameter, parameter[args]]: constant[ According to the spec:: If more than one capability is named, the RFC1459 designated sentinel (:) for a multi-parameter argument must be present. It's not obvious where the sentinel should be present or if it must be omitted for a single parameter, so follow convention and only include the sentinel prefixed to the first parameter if more than one parameter is present. ] if compare[call[name[len], parameter[name[args]]] greater[>] constant[1]] begin[:] return[binary_operation[tuple[[<ast.BinOp object at 0x7da204961bd0>]] + call[name[args]][<ast.Slice object at 0x7da204961c60>]]] return[name[args]] call[name[self].send_items, parameter[constant[CAP], name[subcommand], <ast.Starred object at 0x7da204960940>]]
keyword[def] identifier[cap] ( identifier[self] , identifier[subcommand] ,* identifier[args] ): literal[string] identifier[cap_subcommands] = identifier[set] ( literal[string] . identifier[split] ()) identifier[client_subcommands] = identifier[set] ( identifier[cap_subcommands] )-{ literal[string] } keyword[assert] identifier[subcommand] keyword[in] identifier[client_subcommands] , literal[string] keyword[def] identifier[_multi_parameter] ( identifier[args] ): literal[string] keyword[if] identifier[len] ( identifier[args] )> literal[int] : keyword[return] ( literal[string] + identifier[args] [ literal[int] ],)+ identifier[args] [ literal[int] :] keyword[return] identifier[args] identifier[self] . identifier[send_items] ( literal[string] , identifier[subcommand] ,* identifier[_multi_parameter] ( identifier[args] ))
def cap(self, subcommand, *args): """ Send a CAP command according to `the spec <http://ircv3.atheme.org/specification/capability-negotiation-3.1>`_. Arguments: subcommand -- LS, LIST, REQ, ACK, CLEAR, END args -- capabilities, if required for given subcommand Example: .cap('LS') .cap('REQ', 'multi-prefix', 'sasl') .cap('END') """ cap_subcommands = set('LS LIST REQ ACK NAK CLEAR END'.split()) client_subcommands = set(cap_subcommands) - {'NAK'} assert subcommand in client_subcommands, 'invalid subcommand' def _multi_parameter(args): """ According to the spec:: If more than one capability is named, the RFC1459 designated sentinel (:) for a multi-parameter argument must be present. It's not obvious where the sentinel should be present or if it must be omitted for a single parameter, so follow convention and only include the sentinel prefixed to the first parameter if more than one parameter is present. """ if len(args) > 1: return (':' + args[0],) + args[1:] # depends on [control=['if'], data=[]] return args self.send_items('CAP', subcommand, *_multi_parameter(args))
def check_platform_variables(self, ds): ''' The value of platform attribute should be set to another variable which contains the details of the platform. There can be multiple platforms involved depending on if all the instances of the featureType in the collection share the same platform or not. If multiple platforms are involved, a variable should be defined for each platform and referenced from the geophysical variable in a space separated string. :param netCDF4.Dataset ds: An open netCDF dataset ''' platform_names = getattr(ds, 'platform', '').split(' ') val = all(platform_name in ds.variables for platform_name in platform_names) msgs = [] if not val: msgs = [('The value of "platform" global attribute should be set to another variable ' 'which contains the details of the platform. If multiple platforms are ' 'involved, a variable should be defined for each platform and referenced ' 'from the geophysical variable in a space separated string.')] return [Result(BaseCheck.HIGH, val, 'platform variables', msgs)]
def function[check_platform_variables, parameter[self, ds]]: constant[ The value of platform attribute should be set to another variable which contains the details of the platform. There can be multiple platforms involved depending on if all the instances of the featureType in the collection share the same platform or not. If multiple platforms are involved, a variable should be defined for each platform and referenced from the geophysical variable in a space separated string. :param netCDF4.Dataset ds: An open netCDF dataset ] variable[platform_names] assign[=] call[call[name[getattr], parameter[name[ds], constant[platform], constant[]]].split, parameter[constant[ ]]] variable[val] assign[=] call[name[all], parameter[<ast.GeneratorExp object at 0x7da2054a4550>]] variable[msgs] assign[=] list[[]] if <ast.UnaryOp object at 0x7da2054a4640> begin[:] variable[msgs] assign[=] list[[<ast.Constant object at 0x7da2054a52a0>]] return[list[[<ast.Call object at 0x7da2054a7f10>]]]
keyword[def] identifier[check_platform_variables] ( identifier[self] , identifier[ds] ): literal[string] identifier[platform_names] = identifier[getattr] ( identifier[ds] , literal[string] , literal[string] ). identifier[split] ( literal[string] ) identifier[val] = identifier[all] ( identifier[platform_name] keyword[in] identifier[ds] . identifier[variables] keyword[for] identifier[platform_name] keyword[in] identifier[platform_names] ) identifier[msgs] =[] keyword[if] keyword[not] identifier[val] : identifier[msgs] =[( literal[string] literal[string] literal[string] literal[string] )] keyword[return] [ identifier[Result] ( identifier[BaseCheck] . identifier[HIGH] , identifier[val] , literal[string] , identifier[msgs] )]
def check_platform_variables(self, ds): """ The value of platform attribute should be set to another variable which contains the details of the platform. There can be multiple platforms involved depending on if all the instances of the featureType in the collection share the same platform or not. If multiple platforms are involved, a variable should be defined for each platform and referenced from the geophysical variable in a space separated string. :param netCDF4.Dataset ds: An open netCDF dataset """ platform_names = getattr(ds, 'platform', '').split(' ') val = all((platform_name in ds.variables for platform_name in platform_names)) msgs = [] if not val: msgs = ['The value of "platform" global attribute should be set to another variable which contains the details of the platform. If multiple platforms are involved, a variable should be defined for each platform and referenced from the geophysical variable in a space separated string.'] # depends on [control=['if'], data=[]] return [Result(BaseCheck.HIGH, val, 'platform variables', msgs)]
def set_membind(nodemask): """ Sets the memory allocation mask. The thread will only allocate memory from the nodes set in nodemask. @param nodemask: node mask @type nodemask: C{set} """ mask = set_to_numa_nodemask(nodemask) tmp = bitmask_t() tmp.maskp = cast(byref(mask), POINTER(c_ulong)) tmp.size = sizeof(nodemask_t) * 8 libnuma.numa_set_membind(byref(tmp))
def function[set_membind, parameter[nodemask]]: constant[ Sets the memory allocation mask. The thread will only allocate memory from the nodes set in nodemask. @param nodemask: node mask @type nodemask: C{set} ] variable[mask] assign[=] call[name[set_to_numa_nodemask], parameter[name[nodemask]]] variable[tmp] assign[=] call[name[bitmask_t], parameter[]] name[tmp].maskp assign[=] call[name[cast], parameter[call[name[byref], parameter[name[mask]]], call[name[POINTER], parameter[name[c_ulong]]]]] name[tmp].size assign[=] binary_operation[call[name[sizeof], parameter[name[nodemask_t]]] * constant[8]] call[name[libnuma].numa_set_membind, parameter[call[name[byref], parameter[name[tmp]]]]]
keyword[def] identifier[set_membind] ( identifier[nodemask] ): literal[string] identifier[mask] = identifier[set_to_numa_nodemask] ( identifier[nodemask] ) identifier[tmp] = identifier[bitmask_t] () identifier[tmp] . identifier[maskp] = identifier[cast] ( identifier[byref] ( identifier[mask] ), identifier[POINTER] ( identifier[c_ulong] )) identifier[tmp] . identifier[size] = identifier[sizeof] ( identifier[nodemask_t] )* literal[int] identifier[libnuma] . identifier[numa_set_membind] ( identifier[byref] ( identifier[tmp] ))
def set_membind(nodemask): """ Sets the memory allocation mask. The thread will only allocate memory from the nodes set in nodemask. @param nodemask: node mask @type nodemask: C{set} """ mask = set_to_numa_nodemask(nodemask) tmp = bitmask_t() tmp.maskp = cast(byref(mask), POINTER(c_ulong)) tmp.size = sizeof(nodemask_t) * 8 libnuma.numa_set_membind(byref(tmp))
def handle_user(self, params): """ Handle the USER command which identifies the user to the server. """ params = params.split(' ', 3) if len(params) != 4: raise IRCError.from_name( 'needmoreparams', 'USER :Not enough parameters') user, mode, unused, realname = params self.user = user self.mode = mode self.realname = realname return ''
def function[handle_user, parameter[self, params]]: constant[ Handle the USER command which identifies the user to the server. ] variable[params] assign[=] call[name[params].split, parameter[constant[ ], constant[3]]] if compare[call[name[len], parameter[name[params]]] not_equal[!=] constant[4]] begin[:] <ast.Raise object at 0x7da204566da0> <ast.Tuple object at 0x7da204564730> assign[=] name[params] name[self].user assign[=] name[user] name[self].mode assign[=] name[mode] name[self].realname assign[=] name[realname] return[constant[]]
keyword[def] identifier[handle_user] ( identifier[self] , identifier[params] ): literal[string] identifier[params] = identifier[params] . identifier[split] ( literal[string] , literal[int] ) keyword[if] identifier[len] ( identifier[params] )!= literal[int] : keyword[raise] identifier[IRCError] . identifier[from_name] ( literal[string] , literal[string] ) identifier[user] , identifier[mode] , identifier[unused] , identifier[realname] = identifier[params] identifier[self] . identifier[user] = identifier[user] identifier[self] . identifier[mode] = identifier[mode] identifier[self] . identifier[realname] = identifier[realname] keyword[return] literal[string]
def handle_user(self, params): """ Handle the USER command which identifies the user to the server. """ params = params.split(' ', 3) if len(params) != 4: raise IRCError.from_name('needmoreparams', 'USER :Not enough parameters') # depends on [control=['if'], data=[]] (user, mode, unused, realname) = params self.user = user self.mode = mode self.realname = realname return ''
def set_storage_data(self, storage_address, offset, value): """ Writes a value to a storage slot in specified account :param storage_address: an account address :param offset: the storage slot to use. :type offset: int or BitVec :param value: the value to write :type value: int or BitVec """ self._world_state[storage_address]['storage'][offset] = value
def function[set_storage_data, parameter[self, storage_address, offset, value]]: constant[ Writes a value to a storage slot in specified account :param storage_address: an account address :param offset: the storage slot to use. :type offset: int or BitVec :param value: the value to write :type value: int or BitVec ] call[call[call[name[self]._world_state][name[storage_address]]][constant[storage]]][name[offset]] assign[=] name[value]
keyword[def] identifier[set_storage_data] ( identifier[self] , identifier[storage_address] , identifier[offset] , identifier[value] ): literal[string] identifier[self] . identifier[_world_state] [ identifier[storage_address] ][ literal[string] ][ identifier[offset] ]= identifier[value]
def set_storage_data(self, storage_address, offset, value): """ Writes a value to a storage slot in specified account :param storage_address: an account address :param offset: the storage slot to use. :type offset: int or BitVec :param value: the value to write :type value: int or BitVec """ self._world_state[storage_address]['storage'][offset] = value
def deserialize_instance(serialized_instance): """ Deserialize Django model instance """ model_name, pk = serialized_instance.split(':') model = apps.get_model(model_name) return model._default_manager.get(pk=pk)
def function[deserialize_instance, parameter[serialized_instance]]: constant[ Deserialize Django model instance ] <ast.Tuple object at 0x7da1b0f38100> assign[=] call[name[serialized_instance].split, parameter[constant[:]]] variable[model] assign[=] call[name[apps].get_model, parameter[name[model_name]]] return[call[name[model]._default_manager.get, parameter[]]]
keyword[def] identifier[deserialize_instance] ( identifier[serialized_instance] ): literal[string] identifier[model_name] , identifier[pk] = identifier[serialized_instance] . identifier[split] ( literal[string] ) identifier[model] = identifier[apps] . identifier[get_model] ( identifier[model_name] ) keyword[return] identifier[model] . identifier[_default_manager] . identifier[get] ( identifier[pk] = identifier[pk] )
def deserialize_instance(serialized_instance): """ Deserialize Django model instance """ (model_name, pk) = serialized_instance.split(':') model = apps.get_model(model_name) return model._default_manager.get(pk=pk)
def assign_properties(thing): """Assign properties to an object. When creating something via a post request (e.g. a node), you can pass the properties of the object in the request. This function gets those values from the request and fills in the relevant columns of the table. """ details = request_parameter(parameter="details", optional=True) if details: setattr(thing, "details", loads(details)) for p in range(5): property_name = "property" + str(p + 1) property = request_parameter(parameter=property_name, optional=True) if property: setattr(thing, property_name, property) session.commit()
def function[assign_properties, parameter[thing]]: constant[Assign properties to an object. When creating something via a post request (e.g. a node), you can pass the properties of the object in the request. This function gets those values from the request and fills in the relevant columns of the table. ] variable[details] assign[=] call[name[request_parameter], parameter[]] if name[details] begin[:] call[name[setattr], parameter[name[thing], constant[details], call[name[loads], parameter[name[details]]]]] for taget[name[p]] in starred[call[name[range], parameter[constant[5]]]] begin[:] variable[property_name] assign[=] binary_operation[constant[property] + call[name[str], parameter[binary_operation[name[p] + constant[1]]]]] variable[property] assign[=] call[name[request_parameter], parameter[]] if name[property] begin[:] call[name[setattr], parameter[name[thing], name[property_name], name[property]]] call[name[session].commit, parameter[]]
keyword[def] identifier[assign_properties] ( identifier[thing] ): literal[string] identifier[details] = identifier[request_parameter] ( identifier[parameter] = literal[string] , identifier[optional] = keyword[True] ) keyword[if] identifier[details] : identifier[setattr] ( identifier[thing] , literal[string] , identifier[loads] ( identifier[details] )) keyword[for] identifier[p] keyword[in] identifier[range] ( literal[int] ): identifier[property_name] = literal[string] + identifier[str] ( identifier[p] + literal[int] ) identifier[property] = identifier[request_parameter] ( identifier[parameter] = identifier[property_name] , identifier[optional] = keyword[True] ) keyword[if] identifier[property] : identifier[setattr] ( identifier[thing] , identifier[property_name] , identifier[property] ) identifier[session] . identifier[commit] ()
def assign_properties(thing): """Assign properties to an object. When creating something via a post request (e.g. a node), you can pass the properties of the object in the request. This function gets those values from the request and fills in the relevant columns of the table. """ details = request_parameter(parameter='details', optional=True) if details: setattr(thing, 'details', loads(details)) # depends on [control=['if'], data=[]] for p in range(5): property_name = 'property' + str(p + 1) property = request_parameter(parameter=property_name, optional=True) if property: setattr(thing, property_name, property) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']] session.commit()
def _sndrcv_rcv(pks, hsent, stopevent, nbrecv, notans, verbose, chainCC, multi, _storage_policy=None): """Function used to receive packets and check their hashret""" if not _storage_policy: _storage_policy = lambda x, y: (x, y) ans = [] def _get_pkt(): # SuperSocket.select() returns, according to each socket type, # the selected sockets + the function to recv() the packets (or None) # (when sockets aren't selectable, should be nonblock_recv) selected, read_func = pks.select([pks]) read_func = read_func or pks.__class__.recv if selected: return read_func(selected[0]) try: while True: r = _get_pkt() if stopevent.is_set(): break if r is None: continue ok = False h = r.hashret() if h in hsent: hlst = hsent[h] for i, sentpkt in enumerate(hlst): if r.answers(sentpkt): ans.append(_storage_policy(sentpkt, r)) if verbose > 1: os.write(1, b"*") ok = True if not multi: del hlst[i] notans -= 1 else: if not hasattr(sentpkt, '_answered'): notans -= 1 sentpkt._answered = 1 break if notans == 0 and not multi: del r break if not ok: if verbose > 1: os.write(1, b".") nbrecv += 1 if conf.debug_match: debug.recv.append(r) del r except KeyboardInterrupt: if chainCC: raise finally: stopevent.set() return (hsent, ans, nbrecv, notans)
def function[_sndrcv_rcv, parameter[pks, hsent, stopevent, nbrecv, notans, verbose, chainCC, multi, _storage_policy]]: constant[Function used to receive packets and check their hashret] if <ast.UnaryOp object at 0x7da1b21bdff0> begin[:] variable[_storage_policy] assign[=] <ast.Lambda object at 0x7da1b21bed10> variable[ans] assign[=] list[[]] def function[_get_pkt, parameter[]]: <ast.Tuple object at 0x7da1b21bc730> assign[=] call[name[pks].select, parameter[list[[<ast.Name object at 0x7da1b21bce20>]]]] variable[read_func] assign[=] <ast.BoolOp object at 0x7da1b21bc970> if name[selected] begin[:] return[call[name[read_func], parameter[call[name[selected]][constant[0]]]]] <ast.Try object at 0x7da1b21bc550> return[tuple[[<ast.Name object at 0x7da1b21e3fd0>, <ast.Name object at 0x7da1b21bdbd0>, <ast.Name object at 0x7da1b21bfa30>, <ast.Name object at 0x7da1b21bcd00>]]]
keyword[def] identifier[_sndrcv_rcv] ( identifier[pks] , identifier[hsent] , identifier[stopevent] , identifier[nbrecv] , identifier[notans] , identifier[verbose] , identifier[chainCC] , identifier[multi] , identifier[_storage_policy] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[_storage_policy] : identifier[_storage_policy] = keyword[lambda] identifier[x] , identifier[y] :( identifier[x] , identifier[y] ) identifier[ans] =[] keyword[def] identifier[_get_pkt] (): identifier[selected] , identifier[read_func] = identifier[pks] . identifier[select] ([ identifier[pks] ]) identifier[read_func] = identifier[read_func] keyword[or] identifier[pks] . identifier[__class__] . identifier[recv] keyword[if] identifier[selected] : keyword[return] identifier[read_func] ( identifier[selected] [ literal[int] ]) keyword[try] : keyword[while] keyword[True] : identifier[r] = identifier[_get_pkt] () keyword[if] identifier[stopevent] . identifier[is_set] (): keyword[break] keyword[if] identifier[r] keyword[is] keyword[None] : keyword[continue] identifier[ok] = keyword[False] identifier[h] = identifier[r] . identifier[hashret] () keyword[if] identifier[h] keyword[in] identifier[hsent] : identifier[hlst] = identifier[hsent] [ identifier[h] ] keyword[for] identifier[i] , identifier[sentpkt] keyword[in] identifier[enumerate] ( identifier[hlst] ): keyword[if] identifier[r] . identifier[answers] ( identifier[sentpkt] ): identifier[ans] . identifier[append] ( identifier[_storage_policy] ( identifier[sentpkt] , identifier[r] )) keyword[if] identifier[verbose] > literal[int] : identifier[os] . identifier[write] ( literal[int] , literal[string] ) identifier[ok] = keyword[True] keyword[if] keyword[not] identifier[multi] : keyword[del] identifier[hlst] [ identifier[i] ] identifier[notans] -= literal[int] keyword[else] : keyword[if] keyword[not] identifier[hasattr] ( identifier[sentpkt] , literal[string] ): identifier[notans] -= literal[int] identifier[sentpkt] . identifier[_answered] = literal[int] keyword[break] keyword[if] identifier[notans] == literal[int] keyword[and] keyword[not] identifier[multi] : keyword[del] identifier[r] keyword[break] keyword[if] keyword[not] identifier[ok] : keyword[if] identifier[verbose] > literal[int] : identifier[os] . identifier[write] ( literal[int] , literal[string] ) identifier[nbrecv] += literal[int] keyword[if] identifier[conf] . identifier[debug_match] : identifier[debug] . identifier[recv] . identifier[append] ( identifier[r] ) keyword[del] identifier[r] keyword[except] identifier[KeyboardInterrupt] : keyword[if] identifier[chainCC] : keyword[raise] keyword[finally] : identifier[stopevent] . identifier[set] () keyword[return] ( identifier[hsent] , identifier[ans] , identifier[nbrecv] , identifier[notans] )
def _sndrcv_rcv(pks, hsent, stopevent, nbrecv, notans, verbose, chainCC, multi, _storage_policy=None): """Function used to receive packets and check their hashret""" if not _storage_policy: _storage_policy = lambda x, y: (x, y) # depends on [control=['if'], data=[]] ans = [] def _get_pkt(): # SuperSocket.select() returns, according to each socket type, # the selected sockets + the function to recv() the packets (or None) # (when sockets aren't selectable, should be nonblock_recv) (selected, read_func) = pks.select([pks]) read_func = read_func or pks.__class__.recv if selected: return read_func(selected[0]) # depends on [control=['if'], data=[]] try: while True: r = _get_pkt() if stopevent.is_set(): break # depends on [control=['if'], data=[]] if r is None: continue # depends on [control=['if'], data=[]] ok = False h = r.hashret() if h in hsent: hlst = hsent[h] for (i, sentpkt) in enumerate(hlst): if r.answers(sentpkt): ans.append(_storage_policy(sentpkt, r)) if verbose > 1: os.write(1, b'*') # depends on [control=['if'], data=[]] ok = True if not multi: del hlst[i] notans -= 1 # depends on [control=['if'], data=[]] else: if not hasattr(sentpkt, '_answered'): notans -= 1 # depends on [control=['if'], data=[]] sentpkt._answered = 1 break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['h', 'hsent']] if notans == 0 and (not multi): del r break # depends on [control=['if'], data=[]] if not ok: if verbose > 1: os.write(1, b'.') # depends on [control=['if'], data=[]] nbrecv += 1 if conf.debug_match: debug.recv.append(r) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] del r # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except KeyboardInterrupt: if chainCC: raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] finally: stopevent.set() return (hsent, ans, nbrecv, notans)
def recordDataPoint(self, swarmId, generation, errScore): """Record the best score for a swarm's generation index (x) Returns list of swarmIds to terminate. """ terminatedSwarms = [] # Append score to existing swarm. if swarmId in self.swarmScores: entry = self.swarmScores[swarmId] assert(len(entry) == generation) entry.append(errScore) entry = self.swarmBests[swarmId] entry.append(min(errScore, entry[-1])) assert(len(self.swarmBests[swarmId]) == len(self.swarmScores[swarmId])) else: # Create list of scores for a new swarm assert (generation == 0) self.swarmScores[swarmId] = [errScore] self.swarmBests[swarmId] = [errScore] # If the current swarm hasn't completed at least MIN_GENERATIONS, it should # not be candidate for maturation or termination. This prevents the initial # allocation of particles in PSO from killing off a field combination too # early. if generation + 1 < self.MATURITY_WINDOW: return terminatedSwarms # If the swarm has completed more than MAX_GENERATIONS, it should be marked # as mature, regardless of how its value is changing. if self.MAX_GENERATIONS is not None and generation > self.MAX_GENERATIONS: self._logger.info( 'Swarm %s has matured (more than %d generations). Stopping' % (swarmId, self.MAX_GENERATIONS)) terminatedSwarms.append(swarmId) if self._isTerminationEnabled: terminatedSwarms.extend(self._getTerminatedSwarms(generation)) # Return which swarms to kill when we've reached maturity # If there is no change in the swarm's best for some time, # Mark it dead cumulativeBestScores = self.swarmBests[swarmId] if cumulativeBestScores[-1] == cumulativeBestScores[-self.MATURITY_WINDOW]: self._logger.info('Swarm %s has matured (no change in %d generations).' 'Stopping...'% (swarmId, self.MATURITY_WINDOW)) terminatedSwarms.append(swarmId) self.terminatedSwarms = self.terminatedSwarms.union(terminatedSwarms) return terminatedSwarms
def function[recordDataPoint, parameter[self, swarmId, generation, errScore]]: constant[Record the best score for a swarm's generation index (x) Returns list of swarmIds to terminate. ] variable[terminatedSwarms] assign[=] list[[]] if compare[name[swarmId] in name[self].swarmScores] begin[:] variable[entry] assign[=] call[name[self].swarmScores][name[swarmId]] assert[compare[call[name[len], parameter[name[entry]]] equal[==] name[generation]]] call[name[entry].append, parameter[name[errScore]]] variable[entry] assign[=] call[name[self].swarmBests][name[swarmId]] call[name[entry].append, parameter[call[name[min], parameter[name[errScore], call[name[entry]][<ast.UnaryOp object at 0x7da2044c21a0>]]]]] assert[compare[call[name[len], parameter[call[name[self].swarmBests][name[swarmId]]]] equal[==] call[name[len], parameter[call[name[self].swarmScores][name[swarmId]]]]]] if compare[binary_operation[name[generation] + constant[1]] less[<] name[self].MATURITY_WINDOW] begin[:] return[name[terminatedSwarms]] if <ast.BoolOp object at 0x7da2044c2b90> begin[:] call[name[self]._logger.info, parameter[binary_operation[constant[Swarm %s has matured (more than %d generations). Stopping] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f58f850>, <ast.Attribute object at 0x7da18f58cfd0>]]]]] call[name[terminatedSwarms].append, parameter[name[swarmId]]] if name[self]._isTerminationEnabled begin[:] call[name[terminatedSwarms].extend, parameter[call[name[self]._getTerminatedSwarms, parameter[name[generation]]]]] variable[cumulativeBestScores] assign[=] call[name[self].swarmBests][name[swarmId]] if compare[call[name[cumulativeBestScores]][<ast.UnaryOp object at 0x7da18dc06e30>] equal[==] call[name[cumulativeBestScores]][<ast.UnaryOp object at 0x7da18dc07a00>]] begin[:] call[name[self]._logger.info, parameter[binary_operation[constant[Swarm %s has matured (no change in %d generations).Stopping...] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc05f00>, <ast.Attribute object at 0x7da18dc068c0>]]]]] call[name[terminatedSwarms].append, parameter[name[swarmId]]] name[self].terminatedSwarms assign[=] call[name[self].terminatedSwarms.union, parameter[name[terminatedSwarms]]] return[name[terminatedSwarms]]
keyword[def] identifier[recordDataPoint] ( identifier[self] , identifier[swarmId] , identifier[generation] , identifier[errScore] ): literal[string] identifier[terminatedSwarms] =[] keyword[if] identifier[swarmId] keyword[in] identifier[self] . identifier[swarmScores] : identifier[entry] = identifier[self] . identifier[swarmScores] [ identifier[swarmId] ] keyword[assert] ( identifier[len] ( identifier[entry] )== identifier[generation] ) identifier[entry] . identifier[append] ( identifier[errScore] ) identifier[entry] = identifier[self] . identifier[swarmBests] [ identifier[swarmId] ] identifier[entry] . identifier[append] ( identifier[min] ( identifier[errScore] , identifier[entry] [- literal[int] ])) keyword[assert] ( identifier[len] ( identifier[self] . identifier[swarmBests] [ identifier[swarmId] ])== identifier[len] ( identifier[self] . identifier[swarmScores] [ identifier[swarmId] ])) keyword[else] : keyword[assert] ( identifier[generation] == literal[int] ) identifier[self] . identifier[swarmScores] [ identifier[swarmId] ]=[ identifier[errScore] ] identifier[self] . identifier[swarmBests] [ identifier[swarmId] ]=[ identifier[errScore] ] keyword[if] identifier[generation] + literal[int] < identifier[self] . identifier[MATURITY_WINDOW] : keyword[return] identifier[terminatedSwarms] keyword[if] identifier[self] . identifier[MAX_GENERATIONS] keyword[is] keyword[not] keyword[None] keyword[and] identifier[generation] > identifier[self] . identifier[MAX_GENERATIONS] : identifier[self] . identifier[_logger] . identifier[info] ( literal[string] % ( identifier[swarmId] , identifier[self] . identifier[MAX_GENERATIONS] )) identifier[terminatedSwarms] . identifier[append] ( identifier[swarmId] ) keyword[if] identifier[self] . identifier[_isTerminationEnabled] : identifier[terminatedSwarms] . identifier[extend] ( identifier[self] . identifier[_getTerminatedSwarms] ( identifier[generation] )) identifier[cumulativeBestScores] = identifier[self] . identifier[swarmBests] [ identifier[swarmId] ] keyword[if] identifier[cumulativeBestScores] [- literal[int] ]== identifier[cumulativeBestScores] [- identifier[self] . identifier[MATURITY_WINDOW] ]: identifier[self] . identifier[_logger] . identifier[info] ( literal[string] literal[string] %( identifier[swarmId] , identifier[self] . identifier[MATURITY_WINDOW] )) identifier[terminatedSwarms] . identifier[append] ( identifier[swarmId] ) identifier[self] . identifier[terminatedSwarms] = identifier[self] . identifier[terminatedSwarms] . identifier[union] ( identifier[terminatedSwarms] ) keyword[return] identifier[terminatedSwarms]
def recordDataPoint(self, swarmId, generation, errScore): """Record the best score for a swarm's generation index (x) Returns list of swarmIds to terminate. """ terminatedSwarms = [] # Append score to existing swarm. if swarmId in self.swarmScores: entry = self.swarmScores[swarmId] assert len(entry) == generation entry.append(errScore) entry = self.swarmBests[swarmId] entry.append(min(errScore, entry[-1])) assert len(self.swarmBests[swarmId]) == len(self.swarmScores[swarmId]) # depends on [control=['if'], data=['swarmId']] else: # Create list of scores for a new swarm assert generation == 0 self.swarmScores[swarmId] = [errScore] self.swarmBests[swarmId] = [errScore] # If the current swarm hasn't completed at least MIN_GENERATIONS, it should # not be candidate for maturation or termination. This prevents the initial # allocation of particles in PSO from killing off a field combination too # early. if generation + 1 < self.MATURITY_WINDOW: return terminatedSwarms # depends on [control=['if'], data=[]] # If the swarm has completed more than MAX_GENERATIONS, it should be marked # as mature, regardless of how its value is changing. if self.MAX_GENERATIONS is not None and generation > self.MAX_GENERATIONS: self._logger.info('Swarm %s has matured (more than %d generations). Stopping' % (swarmId, self.MAX_GENERATIONS)) terminatedSwarms.append(swarmId) # depends on [control=['if'], data=[]] if self._isTerminationEnabled: terminatedSwarms.extend(self._getTerminatedSwarms(generation)) # depends on [control=['if'], data=[]] # Return which swarms to kill when we've reached maturity # If there is no change in the swarm's best for some time, # Mark it dead cumulativeBestScores = self.swarmBests[swarmId] if cumulativeBestScores[-1] == cumulativeBestScores[-self.MATURITY_WINDOW]: self._logger.info('Swarm %s has matured (no change in %d generations).Stopping...' % (swarmId, self.MATURITY_WINDOW)) terminatedSwarms.append(swarmId) # depends on [control=['if'], data=[]] self.terminatedSwarms = self.terminatedSwarms.union(terminatedSwarms) return terminatedSwarms
def pipeline_dataprep(d, segment): """ Single-threaded pipeline for data prep that can be started in a pool. """ logger.debug('dataprep starting for segment %d' % segment) # dataprep reads for a single segment, so d['segment'] defined here d['segment'] = segment # set up numpy arrays, as expected by dataprep functions data_read = numpyview(data_read_mem, 'complex64', datashape(d), raw=False); data = numpyview(data_mem, 'complex64', datashape(d), raw=False) u_read = numpyview(u_read_mem, 'float32', d['nbl'], raw=False); u = numpyview(u_mem, 'float32', d['nbl'], raw=False) v_read = numpyview(v_read_mem, 'float32', d['nbl'], raw=False); v = numpyview(v_mem, 'float32', d['nbl'], raw=False) w_read = numpyview(w_read_mem, 'float32', d['nbl'], raw=False); w = numpyview(w_mem, 'float32', d['nbl'], raw=False) #### #### #### #### # 1) Read data #### #### #### #### with data_read_mem.get_lock(): if d['dataformat'] == 'ms': # CASA-based read segread = pm.readsegment(d, segment) data_read[:] = segread[0] (u_read[:], v_read[:], w_read[:]) = (segread[1][d['readints']/2], segread[2][d['readints']/2], segread[3][d['readints']/2]) # mid int good enough for segment. could extend this to save per chunk del segread elif d['dataformat'] == 'sdm': data_read[:] = ps.read_bdf_segment(d, segment) (u_read[:], v_read[:], w_read[:]) = ps.get_uvw_segment(d, segment) #### #### #### #### # 2) Prepare data #### #### #### #### # calibrate data if os.path.exists(d['gainfile']): try: radec = (); spwind = []; calname = '' # set defaults if '.GN' in d['gainfile']: # if telcal file if d.has_key('calname'): calname = d['calname'] sols = pc.telcal_sol(d['gainfile']) # parse gainfile else: # if CASA table if d.has_key('calradec'): radec = d['calradec'] # optionally defined cal location spwind = d['spw'] sols = pc.casa_sol(d['gainfile'], flagants=d['flagantsol']) # parse gainfile sols.parsebp(d['bpfile']) # parse bpfile # if gainfile parsed ok, choose best solution for data sols.set_selection(d['segmenttimes'][segment].mean(), d['freq']*1e9, rtlib.calc_blarr(d), calname=calname, pols=d['pols'], radec=radec, spwind=spwind) sols.apply(data_read) except: logger.warning('Could not parse or apply gainfile %s.' % d['gainfile']) raise else: logger.warn('Calibration file not found. Proceeding with no calibration applied.') # flag data if len(d['flaglist']): logger.info('Flagging with flaglist: %s' % d['flaglist']) dataflag(d, data_read) else: logger.warn('No real-time flagging.') # mean t vis subtration if d['timesub'] == 'mean': logger.info('Subtracting mean visibility in time...') rtlib.meantsub(data_read, [0, d['nbl']]) else: logger.warn('No mean time subtraction.') # save noise pickle if d['savenoise']: noisepickle(d, data_read, u_read, v_read, w_read, chunk=200) # phase to new location if l1,m1 set and nonzero value try: if any([d['l1'], d['m1']]): logger.info('Rephasing data to (l, m)=(%.4f, %.4f).' % (d['l1'], d['m1'])) rtlib.phaseshift_threaded(data_read, d, d['l1'], d['m1'], u_read, v_read) d['l0'] = d['l1'] d['m0'] = d['m1'] else: logger.debug('Not rephasing.') except KeyError: pass if d['mock']: # could be list or int # assume that std of vis in the middle of the segment is # characteristic of noise throughout the segment falsecands = {} datamid = n.ma.masked_equal(data_read[d['readints']/2].real, 0, copy=True) madstd = 1.4826 * n.ma.median(n.abs(datamid - n.ma.median(datamid)))/n.sqrt(d['npol']*d['nbl']*d['nchan']) std = datamid.std()/n.sqrt(d['npol']*d['nbl']*d['nchan']) logger.debug('Noise per vis in central int: madstd {}, std {}'.format(madstd, std)) dt = 1 # pulse width in integrations if isinstance(d['mock'], int): for i in n.random.randint(d['datadelay'][-1], d['readints'], d['mock']): # add nmock transients at random ints (loff, moff, A, DM) = make_transient(madstd, max(d['dmarr']), Amin=1.2*d['sigma_image1']) candid = (int(segment), int(i), DM, int(dt), int(0)) falsecands[candid] = [A/madstd, A, loff, moff] elif isinstance(d['mock'], list): for mock in d['mock']: try: (i, DM, loff, moff, SNR) = mock candid = (int(segment), int(i), DM, int(dt), int(0)) falsecands[candid] = [SNR, SNR*madstd, loff, moff] except: logger.warn('Could not parse mock parameters: {}'.format(mock)) else: logger.warn('Not a recognized type for mock.') for candid in falsecands: (segment, i, DM, dt, beamnum) = candid (SNR, A, loff, moff) = falsecands[candid] logger.info('Adding mock transient at int %d, DM %.1f, (l, m) = (%f, %f) at est SNR %.1f' % (i, DM, loff, moff, SNR)) add_transient(d, data_read, u_read, v_read, w_read, loff, moff, i, A, DM, dt) if d['savecands']: savecands(d, falsecands, domock=True) with data_mem.get_lock(): data[:] = data_read[:] u[:] = u_read[:]; v[:] = v_read[:]; w[:] = w_read[:] logger.debug('All data unlocked for segment %d' % segment) # d now has segment keyword defined return d
def function[pipeline_dataprep, parameter[d, segment]]: constant[ Single-threaded pipeline for data prep that can be started in a pool. ] call[name[logger].debug, parameter[binary_operation[constant[dataprep starting for segment %d] <ast.Mod object at 0x7da2590d6920> name[segment]]]] call[name[d]][constant[segment]] assign[=] name[segment] variable[data_read] assign[=] call[name[numpyview], parameter[name[data_read_mem], constant[complex64], call[name[datashape], parameter[name[d]]]]] variable[data] assign[=] call[name[numpyview], parameter[name[data_mem], constant[complex64], call[name[datashape], parameter[name[d]]]]] variable[u_read] assign[=] call[name[numpyview], parameter[name[u_read_mem], constant[float32], call[name[d]][constant[nbl]]]] variable[u] assign[=] call[name[numpyview], parameter[name[u_mem], constant[float32], call[name[d]][constant[nbl]]]] variable[v_read] assign[=] call[name[numpyview], parameter[name[v_read_mem], constant[float32], call[name[d]][constant[nbl]]]] variable[v] assign[=] call[name[numpyview], parameter[name[v_mem], constant[float32], call[name[d]][constant[nbl]]]] variable[w_read] assign[=] call[name[numpyview], parameter[name[w_read_mem], constant[float32], call[name[d]][constant[nbl]]]] variable[w] assign[=] call[name[numpyview], parameter[name[w_mem], constant[float32], call[name[d]][constant[nbl]]]] with call[name[data_read_mem].get_lock, parameter[]] begin[:] if compare[call[name[d]][constant[dataformat]] equal[==] constant[ms]] begin[:] variable[segread] assign[=] call[name[pm].readsegment, parameter[name[d], name[segment]]] call[name[data_read]][<ast.Slice object at 0x7da1b2405f30>] assign[=] call[name[segread]][constant[0]] <ast.Tuple object at 0x7da1b2406020> assign[=] tuple[[<ast.Subscript object at 0x7da1b2406230>, <ast.Subscript object at 0x7da1b24063e0>, <ast.Subscript object at 0x7da1b2406590>]] <ast.Delete object at 0x7da1b2406740> if call[name[os].path.exists, parameter[call[name[d]][constant[gainfile]]]] begin[:] <ast.Try object at 0x7da1b2406f20> if call[name[len], parameter[call[name[d]][constant[flaglist]]]] begin[:] call[name[logger].info, parameter[binary_operation[constant[Flagging with flaglist: %s] <ast.Mod object at 0x7da2590d6920> call[name[d]][constant[flaglist]]]]] call[name[dataflag], parameter[name[d], name[data_read]]] if compare[call[name[d]][constant[timesub]] equal[==] constant[mean]] begin[:] call[name[logger].info, parameter[constant[Subtracting mean visibility in time...]]] call[name[rtlib].meantsub, parameter[name[data_read], list[[<ast.Constant object at 0x7da1b2428d90>, <ast.Subscript object at 0x7da1b242aad0>]]]] if call[name[d]][constant[savenoise]] begin[:] call[name[noisepickle], parameter[name[d], name[data_read], name[u_read], name[v_read], name[w_read]]] <ast.Try object at 0x7da1b242aef0> if call[name[d]][constant[mock]] begin[:] variable[falsecands] assign[=] dictionary[[], []] variable[datamid] assign[=] call[name[n].ma.masked_equal, parameter[call[name[data_read]][binary_operation[call[name[d]][constant[readints]] / constant[2]]].real, constant[0]]] variable[madstd] assign[=] binary_operation[binary_operation[constant[1.4826] * call[name[n].ma.median, parameter[call[name[n].abs, parameter[binary_operation[name[datamid] - call[name[n].ma.median, parameter[name[datamid]]]]]]]]] / call[name[n].sqrt, parameter[binary_operation[binary_operation[call[name[d]][constant[npol]] * call[name[d]][constant[nbl]]] * call[name[d]][constant[nchan]]]]]] variable[std] assign[=] binary_operation[call[name[datamid].std, parameter[]] / call[name[n].sqrt, parameter[binary_operation[binary_operation[call[name[d]][constant[npol]] * call[name[d]][constant[nbl]]] * call[name[d]][constant[nchan]]]]]] call[name[logger].debug, parameter[call[constant[Noise per vis in central int: madstd {}, std {}].format, parameter[name[madstd], name[std]]]]] variable[dt] assign[=] constant[1] if call[name[isinstance], parameter[call[name[d]][constant[mock]], name[int]]] begin[:] for taget[name[i]] in starred[call[name[n].random.randint, parameter[call[call[name[d]][constant[datadelay]]][<ast.UnaryOp object at 0x7da1b24812a0>], call[name[d]][constant[readints]], call[name[d]][constant[mock]]]]] begin[:] <ast.Tuple object at 0x7da1b2481840> assign[=] call[name[make_transient], parameter[name[madstd], call[name[max], parameter[call[name[d]][constant[dmarr]]]]]] variable[candid] assign[=] tuple[[<ast.Call object at 0x7da1b24818a0>, <ast.Call object at 0x7da1b2480f10>, <ast.Name object at 0x7da1b2480e80>, <ast.Call object at 0x7da1b2481b40>, <ast.Call object at 0x7da1b2481ba0>]] call[name[falsecands]][name[candid]] assign[=] list[[<ast.BinOp object at 0x7da1b2480610>, <ast.Name object at 0x7da1b2480730>, <ast.Name object at 0x7da1b24805e0>, <ast.Name object at 0x7da1b2480670>]] for taget[name[candid]] in starred[name[falsecands]] begin[:] <ast.Tuple object at 0x7da1b2480d00> assign[=] name[candid] <ast.Tuple object at 0x7da1b2480f70> assign[=] call[name[falsecands]][name[candid]] call[name[logger].info, parameter[binary_operation[constant[Adding mock transient at int %d, DM %.1f, (l, m) = (%f, %f) at est SNR %.1f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b241c970>, <ast.Name object at 0x7da1b241c8e0>, <ast.Name object at 0x7da1b241c910>, <ast.Name object at 0x7da1b241e500>, <ast.Name object at 0x7da1b241e560>]]]]] call[name[add_transient], parameter[name[d], name[data_read], name[u_read], name[v_read], name[w_read], name[loff], name[moff], name[i], name[A], name[DM], name[dt]]] if call[name[d]][constant[savecands]] begin[:] call[name[savecands], parameter[name[d], name[falsecands]]] with call[name[data_mem].get_lock, parameter[]] begin[:] call[name[data]][<ast.Slice object at 0x7da1b2411960>] assign[=] call[name[data_read]][<ast.Slice object at 0x7da1b24117b0>] call[name[u]][<ast.Slice object at 0x7da1b24122f0>] assign[=] call[name[u_read]][<ast.Slice object at 0x7da1b2411600>] call[name[v]][<ast.Slice object at 0x7da1b2411b40>] assign[=] call[name[v_read]][<ast.Slice object at 0x7da1b2411e10>] call[name[w]][<ast.Slice object at 0x7da1b2411630>] assign[=] call[name[w_read]][<ast.Slice object at 0x7da1b2411540>] call[name[logger].debug, parameter[binary_operation[constant[All data unlocked for segment %d] <ast.Mod object at 0x7da2590d6920> name[segment]]]] return[name[d]]
keyword[def] identifier[pipeline_dataprep] ( identifier[d] , identifier[segment] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] % identifier[segment] ) identifier[d] [ literal[string] ]= identifier[segment] identifier[data_read] = identifier[numpyview] ( identifier[data_read_mem] , literal[string] , identifier[datashape] ( identifier[d] ), identifier[raw] = keyword[False] ); identifier[data] = identifier[numpyview] ( identifier[data_mem] , literal[string] , identifier[datashape] ( identifier[d] ), identifier[raw] = keyword[False] ) identifier[u_read] = identifier[numpyview] ( identifier[u_read_mem] , literal[string] , identifier[d] [ literal[string] ], identifier[raw] = keyword[False] ); identifier[u] = identifier[numpyview] ( identifier[u_mem] , literal[string] , identifier[d] [ literal[string] ], identifier[raw] = keyword[False] ) identifier[v_read] = identifier[numpyview] ( identifier[v_read_mem] , literal[string] , identifier[d] [ literal[string] ], identifier[raw] = keyword[False] ); identifier[v] = identifier[numpyview] ( identifier[v_mem] , literal[string] , identifier[d] [ literal[string] ], identifier[raw] = keyword[False] ) identifier[w_read] = identifier[numpyview] ( identifier[w_read_mem] , literal[string] , identifier[d] [ literal[string] ], identifier[raw] = keyword[False] ); identifier[w] = identifier[numpyview] ( identifier[w_mem] , literal[string] , identifier[d] [ literal[string] ], identifier[raw] = keyword[False] ) keyword[with] identifier[data_read_mem] . identifier[get_lock] (): keyword[if] identifier[d] [ literal[string] ]== literal[string] : identifier[segread] = identifier[pm] . identifier[readsegment] ( identifier[d] , identifier[segment] ) identifier[data_read] [:]= identifier[segread] [ literal[int] ] ( identifier[u_read] [:], identifier[v_read] [:], identifier[w_read] [:])=( identifier[segread] [ literal[int] ][ identifier[d] [ literal[string] ]/ literal[int] ], identifier[segread] [ literal[int] ][ identifier[d] [ literal[string] ]/ literal[int] ], identifier[segread] [ literal[int] ][ identifier[d] [ literal[string] ]/ literal[int] ]) keyword[del] identifier[segread] keyword[elif] identifier[d] [ literal[string] ]== literal[string] : identifier[data_read] [:]= identifier[ps] . identifier[read_bdf_segment] ( identifier[d] , identifier[segment] ) ( identifier[u_read] [:], identifier[v_read] [:], identifier[w_read] [:])= identifier[ps] . identifier[get_uvw_segment] ( identifier[d] , identifier[segment] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[d] [ literal[string] ]): keyword[try] : identifier[radec] =(); identifier[spwind] =[]; identifier[calname] = literal[string] keyword[if] literal[string] keyword[in] identifier[d] [ literal[string] ]: keyword[if] identifier[d] . identifier[has_key] ( literal[string] ): identifier[calname] = identifier[d] [ literal[string] ] identifier[sols] = identifier[pc] . identifier[telcal_sol] ( identifier[d] [ literal[string] ]) keyword[else] : keyword[if] identifier[d] . identifier[has_key] ( literal[string] ): identifier[radec] = identifier[d] [ literal[string] ] identifier[spwind] = identifier[d] [ literal[string] ] identifier[sols] = identifier[pc] . identifier[casa_sol] ( identifier[d] [ literal[string] ], identifier[flagants] = identifier[d] [ literal[string] ]) identifier[sols] . identifier[parsebp] ( identifier[d] [ literal[string] ]) identifier[sols] . identifier[set_selection] ( identifier[d] [ literal[string] ][ identifier[segment] ]. identifier[mean] (), identifier[d] [ literal[string] ]* literal[int] , identifier[rtlib] . identifier[calc_blarr] ( identifier[d] ), identifier[calname] = identifier[calname] , identifier[pols] = identifier[d] [ literal[string] ], identifier[radec] = identifier[radec] , identifier[spwind] = identifier[spwind] ) identifier[sols] . identifier[apply] ( identifier[data_read] ) keyword[except] : identifier[logger] . identifier[warning] ( literal[string] % identifier[d] [ literal[string] ]) keyword[raise] keyword[else] : identifier[logger] . identifier[warn] ( literal[string] ) keyword[if] identifier[len] ( identifier[d] [ literal[string] ]): identifier[logger] . identifier[info] ( literal[string] % identifier[d] [ literal[string] ]) identifier[dataflag] ( identifier[d] , identifier[data_read] ) keyword[else] : identifier[logger] . identifier[warn] ( literal[string] ) keyword[if] identifier[d] [ literal[string] ]== literal[string] : identifier[logger] . identifier[info] ( literal[string] ) identifier[rtlib] . identifier[meantsub] ( identifier[data_read] ,[ literal[int] , identifier[d] [ literal[string] ]]) keyword[else] : identifier[logger] . identifier[warn] ( literal[string] ) keyword[if] identifier[d] [ literal[string] ]: identifier[noisepickle] ( identifier[d] , identifier[data_read] , identifier[u_read] , identifier[v_read] , identifier[w_read] , identifier[chunk] = literal[int] ) keyword[try] : keyword[if] identifier[any] ([ identifier[d] [ literal[string] ], identifier[d] [ literal[string] ]]): identifier[logger] . identifier[info] ( literal[string] %( identifier[d] [ literal[string] ], identifier[d] [ literal[string] ])) identifier[rtlib] . identifier[phaseshift_threaded] ( identifier[data_read] , identifier[d] , identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[u_read] , identifier[v_read] ) identifier[d] [ literal[string] ]= identifier[d] [ literal[string] ] identifier[d] [ literal[string] ]= identifier[d] [ literal[string] ] keyword[else] : identifier[logger] . identifier[debug] ( literal[string] ) keyword[except] identifier[KeyError] : keyword[pass] keyword[if] identifier[d] [ literal[string] ]: identifier[falsecands] ={} identifier[datamid] = identifier[n] . identifier[ma] . identifier[masked_equal] ( identifier[data_read] [ identifier[d] [ literal[string] ]/ literal[int] ]. identifier[real] , literal[int] , identifier[copy] = keyword[True] ) identifier[madstd] = literal[int] * identifier[n] . identifier[ma] . identifier[median] ( identifier[n] . identifier[abs] ( identifier[datamid] - identifier[n] . identifier[ma] . identifier[median] ( identifier[datamid] )))/ identifier[n] . identifier[sqrt] ( identifier[d] [ literal[string] ]* identifier[d] [ literal[string] ]* identifier[d] [ literal[string] ]) identifier[std] = identifier[datamid] . identifier[std] ()/ identifier[n] . identifier[sqrt] ( identifier[d] [ literal[string] ]* identifier[d] [ literal[string] ]* identifier[d] [ literal[string] ]) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[madstd] , identifier[std] )) identifier[dt] = literal[int] keyword[if] identifier[isinstance] ( identifier[d] [ literal[string] ], identifier[int] ): keyword[for] identifier[i] keyword[in] identifier[n] . identifier[random] . identifier[randint] ( identifier[d] [ literal[string] ][- literal[int] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ]): ( identifier[loff] , identifier[moff] , identifier[A] , identifier[DM] )= identifier[make_transient] ( identifier[madstd] , identifier[max] ( identifier[d] [ literal[string] ]), identifier[Amin] = literal[int] * identifier[d] [ literal[string] ]) identifier[candid] =( identifier[int] ( identifier[segment] ), identifier[int] ( identifier[i] ), identifier[DM] , identifier[int] ( identifier[dt] ), identifier[int] ( literal[int] )) identifier[falsecands] [ identifier[candid] ]=[ identifier[A] / identifier[madstd] , identifier[A] , identifier[loff] , identifier[moff] ] keyword[elif] identifier[isinstance] ( identifier[d] [ literal[string] ], identifier[list] ): keyword[for] identifier[mock] keyword[in] identifier[d] [ literal[string] ]: keyword[try] : ( identifier[i] , identifier[DM] , identifier[loff] , identifier[moff] , identifier[SNR] )= identifier[mock] identifier[candid] =( identifier[int] ( identifier[segment] ), identifier[int] ( identifier[i] ), identifier[DM] , identifier[int] ( identifier[dt] ), identifier[int] ( literal[int] )) identifier[falsecands] [ identifier[candid] ]=[ identifier[SNR] , identifier[SNR] * identifier[madstd] , identifier[loff] , identifier[moff] ] keyword[except] : identifier[logger] . identifier[warn] ( literal[string] . identifier[format] ( identifier[mock] )) keyword[else] : identifier[logger] . identifier[warn] ( literal[string] ) keyword[for] identifier[candid] keyword[in] identifier[falsecands] : ( identifier[segment] , identifier[i] , identifier[DM] , identifier[dt] , identifier[beamnum] )= identifier[candid] ( identifier[SNR] , identifier[A] , identifier[loff] , identifier[moff] )= identifier[falsecands] [ identifier[candid] ] identifier[logger] . identifier[info] ( literal[string] %( identifier[i] , identifier[DM] , identifier[loff] , identifier[moff] , identifier[SNR] )) identifier[add_transient] ( identifier[d] , identifier[data_read] , identifier[u_read] , identifier[v_read] , identifier[w_read] , identifier[loff] , identifier[moff] , identifier[i] , identifier[A] , identifier[DM] , identifier[dt] ) keyword[if] identifier[d] [ literal[string] ]: identifier[savecands] ( identifier[d] , identifier[falsecands] , identifier[domock] = keyword[True] ) keyword[with] identifier[data_mem] . identifier[get_lock] (): identifier[data] [:]= identifier[data_read] [:] identifier[u] [:]= identifier[u_read] [:]; identifier[v] [:]= identifier[v_read] [:]; identifier[w] [:]= identifier[w_read] [:] identifier[logger] . identifier[debug] ( literal[string] % identifier[segment] ) keyword[return] identifier[d]
def pipeline_dataprep(d, segment): """ Single-threaded pipeline for data prep that can be started in a pool. """ logger.debug('dataprep starting for segment %d' % segment) # dataprep reads for a single segment, so d['segment'] defined here d['segment'] = segment # set up numpy arrays, as expected by dataprep functions data_read = numpyview(data_read_mem, 'complex64', datashape(d), raw=False) data = numpyview(data_mem, 'complex64', datashape(d), raw=False) u_read = numpyview(u_read_mem, 'float32', d['nbl'], raw=False) u = numpyview(u_mem, 'float32', d['nbl'], raw=False) v_read = numpyview(v_read_mem, 'float32', d['nbl'], raw=False) v = numpyview(v_mem, 'float32', d['nbl'], raw=False) w_read = numpyview(w_read_mem, 'float32', d['nbl'], raw=False) w = numpyview(w_mem, 'float32', d['nbl'], raw=False) #### #### #### #### # 1) Read data #### #### #### #### with data_read_mem.get_lock(): if d['dataformat'] == 'ms': # CASA-based read segread = pm.readsegment(d, segment) data_read[:] = segread[0] (u_read[:], v_read[:], w_read[:]) = (segread[1][d['readints'] / 2], segread[2][d['readints'] / 2], segread[3][d['readints'] / 2]) # mid int good enough for segment. could extend this to save per chunk del segread # depends on [control=['if'], data=[]] elif d['dataformat'] == 'sdm': data_read[:] = ps.read_bdf_segment(d, segment) (u_read[:], v_read[:], w_read[:]) = ps.get_uvw_segment(d, segment) # depends on [control=['if'], data=[]] #### #### #### #### # 2) Prepare data #### #### #### #### # calibrate data if os.path.exists(d['gainfile']): try: radec = () spwind = [] calname = '' # set defaults if '.GN' in d['gainfile']: # if telcal file if d.has_key('calname'): calname = d['calname'] # depends on [control=['if'], data=[]] sols = pc.telcal_sol(d['gainfile']) # parse gainfile # depends on [control=['if'], data=[]] else: # if CASA table if d.has_key('calradec'): radec = d['calradec'] # optionally defined cal location # depends on [control=['if'], data=[]] spwind = d['spw'] sols = pc.casa_sol(d['gainfile'], flagants=d['flagantsol']) # parse gainfile sols.parsebp(d['bpfile']) # parse bpfile # if gainfile parsed ok, choose best solution for data sols.set_selection(d['segmenttimes'][segment].mean(), d['freq'] * 1000000000.0, rtlib.calc_blarr(d), calname=calname, pols=d['pols'], radec=radec, spwind=spwind) sols.apply(data_read) # depends on [control=['try'], data=[]] except: logger.warning('Could not parse or apply gainfile %s.' % d['gainfile']) raise # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: logger.warn('Calibration file not found. Proceeding with no calibration applied.') # flag data if len(d['flaglist']): logger.info('Flagging with flaglist: %s' % d['flaglist']) dataflag(d, data_read) # depends on [control=['if'], data=[]] else: logger.warn('No real-time flagging.') # mean t vis subtration if d['timesub'] == 'mean': logger.info('Subtracting mean visibility in time...') rtlib.meantsub(data_read, [0, d['nbl']]) # depends on [control=['if'], data=[]] else: logger.warn('No mean time subtraction.') # save noise pickle if d['savenoise']: noisepickle(d, data_read, u_read, v_read, w_read, chunk=200) # depends on [control=['if'], data=[]] # phase to new location if l1,m1 set and nonzero value try: if any([d['l1'], d['m1']]): logger.info('Rephasing data to (l, m)=(%.4f, %.4f).' % (d['l1'], d['m1'])) rtlib.phaseshift_threaded(data_read, d, d['l1'], d['m1'], u_read, v_read) d['l0'] = d['l1'] d['m0'] = d['m1'] # depends on [control=['if'], data=[]] else: logger.debug('Not rephasing.') # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] if d['mock']: # could be list or int # assume that std of vis in the middle of the segment is # characteristic of noise throughout the segment falsecands = {} datamid = n.ma.masked_equal(data_read[d['readints'] / 2].real, 0, copy=True) madstd = 1.4826 * n.ma.median(n.abs(datamid - n.ma.median(datamid))) / n.sqrt(d['npol'] * d['nbl'] * d['nchan']) std = datamid.std() / n.sqrt(d['npol'] * d['nbl'] * d['nchan']) logger.debug('Noise per vis in central int: madstd {}, std {}'.format(madstd, std)) dt = 1 # pulse width in integrations if isinstance(d['mock'], int): for i in n.random.randint(d['datadelay'][-1], d['readints'], d['mock']): # add nmock transients at random ints (loff, moff, A, DM) = make_transient(madstd, max(d['dmarr']), Amin=1.2 * d['sigma_image1']) candid = (int(segment), int(i), DM, int(dt), int(0)) falsecands[candid] = [A / madstd, A, loff, moff] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] elif isinstance(d['mock'], list): for mock in d['mock']: try: (i, DM, loff, moff, SNR) = mock candid = (int(segment), int(i), DM, int(dt), int(0)) falsecands[candid] = [SNR, SNR * madstd, loff, moff] # depends on [control=['try'], data=[]] except: logger.warn('Could not parse mock parameters: {}'.format(mock)) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['mock']] # depends on [control=['if'], data=[]] else: logger.warn('Not a recognized type for mock.') for candid in falsecands: (segment, i, DM, dt, beamnum) = candid (SNR, A, loff, moff) = falsecands[candid] logger.info('Adding mock transient at int %d, DM %.1f, (l, m) = (%f, %f) at est SNR %.1f' % (i, DM, loff, moff, SNR)) add_transient(d, data_read, u_read, v_read, w_read, loff, moff, i, A, DM, dt) # depends on [control=['for'], data=['candid']] if d['savecands']: savecands(d, falsecands, domock=True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] with data_mem.get_lock(): data[:] = data_read[:] u[:] = u_read[:] v[:] = v_read[:] w[:] = w_read[:] # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]] logger.debug('All data unlocked for segment %d' % segment) # d now has segment keyword defined return d
def rescale_gradients(model: Model, grad_norm: Optional[float] = None) -> Optional[float]: """ Performs gradient rescaling. Is a no-op if gradient rescaling is not enabled. """ if grad_norm: parameters_to_clip = [p for p in model.parameters() if p.grad is not None] return sparse_clip_norm(parameters_to_clip, grad_norm) return None
def function[rescale_gradients, parameter[model, grad_norm]]: constant[ Performs gradient rescaling. Is a no-op if gradient rescaling is not enabled. ] if name[grad_norm] begin[:] variable[parameters_to_clip] assign[=] <ast.ListComp object at 0x7da20c7cba30> return[call[name[sparse_clip_norm], parameter[name[parameters_to_clip], name[grad_norm]]]] return[constant[None]]
keyword[def] identifier[rescale_gradients] ( identifier[model] : identifier[Model] , identifier[grad_norm] : identifier[Optional] [ identifier[float] ]= keyword[None] )-> identifier[Optional] [ identifier[float] ]: literal[string] keyword[if] identifier[grad_norm] : identifier[parameters_to_clip] =[ identifier[p] keyword[for] identifier[p] keyword[in] identifier[model] . identifier[parameters] () keyword[if] identifier[p] . identifier[grad] keyword[is] keyword[not] keyword[None] ] keyword[return] identifier[sparse_clip_norm] ( identifier[parameters_to_clip] , identifier[grad_norm] ) keyword[return] keyword[None]
def rescale_gradients(model: Model, grad_norm: Optional[float]=None) -> Optional[float]: """ Performs gradient rescaling. Is a no-op if gradient rescaling is not enabled. """ if grad_norm: parameters_to_clip = [p for p in model.parameters() if p.grad is not None] return sparse_clip_norm(parameters_to_clip, grad_norm) # depends on [control=['if'], data=[]] return None
def get_installed_classes(cls): """ Iterates over installed plugins associated with the `entry_point` and returns a dictionary of viable ones keyed off of their names. A viable installed plugin is one that is both loadable *and* a subclass of the Pluggable subclass in question. """ installed_classes = {} for entry_point in pkg_resources.iter_entry_points(cls.entry_point): try: plugin = entry_point.load() except ImportError as e: logger.error( "Could not load plugin %s: %s", entry_point.name, str(e) ) continue if not issubclass(plugin, cls): logger.error( "Could not load plugin %s:" + " %s class is not subclass of %s", entry_point.name, plugin.__class__.__name__, cls.__name__ ) continue if not plugin.validate_dependencies(): logger.error( "Could not load plugin %s:" + " %s class dependencies not met", entry_point.name, plugin.__name__ ) continue installed_classes[entry_point.name] = plugin return installed_classes
def function[get_installed_classes, parameter[cls]]: constant[ Iterates over installed plugins associated with the `entry_point` and returns a dictionary of viable ones keyed off of their names. A viable installed plugin is one that is both loadable *and* a subclass of the Pluggable subclass in question. ] variable[installed_classes] assign[=] dictionary[[], []] for taget[name[entry_point]] in starred[call[name[pkg_resources].iter_entry_points, parameter[name[cls].entry_point]]] begin[:] <ast.Try object at 0x7da207f03e50> if <ast.UnaryOp object at 0x7da207f02350> begin[:] call[name[logger].error, parameter[binary_operation[constant[Could not load plugin %s:] + constant[ %s class is not subclass of %s]], name[entry_point].name, name[plugin].__class__.__name__, name[cls].__name__]] continue if <ast.UnaryOp object at 0x7da18f09dc30> begin[:] call[name[logger].error, parameter[binary_operation[constant[Could not load plugin %s:] + constant[ %s class dependencies not met]], name[entry_point].name, name[plugin].__name__]] continue call[name[installed_classes]][name[entry_point].name] assign[=] name[plugin] return[name[installed_classes]]
keyword[def] identifier[get_installed_classes] ( identifier[cls] ): literal[string] identifier[installed_classes] ={} keyword[for] identifier[entry_point] keyword[in] identifier[pkg_resources] . identifier[iter_entry_points] ( identifier[cls] . identifier[entry_point] ): keyword[try] : identifier[plugin] = identifier[entry_point] . identifier[load] () keyword[except] identifier[ImportError] keyword[as] identifier[e] : identifier[logger] . identifier[error] ( literal[string] , identifier[entry_point] . identifier[name] , identifier[str] ( identifier[e] ) ) keyword[continue] keyword[if] keyword[not] identifier[issubclass] ( identifier[plugin] , identifier[cls] ): identifier[logger] . identifier[error] ( literal[string] + literal[string] , identifier[entry_point] . identifier[name] , identifier[plugin] . identifier[__class__] . identifier[__name__] , identifier[cls] . identifier[__name__] ) keyword[continue] keyword[if] keyword[not] identifier[plugin] . identifier[validate_dependencies] (): identifier[logger] . identifier[error] ( literal[string] + literal[string] , identifier[entry_point] . identifier[name] , identifier[plugin] . identifier[__name__] ) keyword[continue] identifier[installed_classes] [ identifier[entry_point] . identifier[name] ]= identifier[plugin] keyword[return] identifier[installed_classes]
def get_installed_classes(cls): """ Iterates over installed plugins associated with the `entry_point` and returns a dictionary of viable ones keyed off of their names. A viable installed plugin is one that is both loadable *and* a subclass of the Pluggable subclass in question. """ installed_classes = {} for entry_point in pkg_resources.iter_entry_points(cls.entry_point): try: plugin = entry_point.load() # depends on [control=['try'], data=[]] except ImportError as e: logger.error('Could not load plugin %s: %s', entry_point.name, str(e)) continue # depends on [control=['except'], data=['e']] if not issubclass(plugin, cls): logger.error('Could not load plugin %s:' + ' %s class is not subclass of %s', entry_point.name, plugin.__class__.__name__, cls.__name__) continue # depends on [control=['if'], data=[]] if not plugin.validate_dependencies(): logger.error('Could not load plugin %s:' + ' %s class dependencies not met', entry_point.name, plugin.__name__) continue # depends on [control=['if'], data=[]] installed_classes[entry_point.name] = plugin # depends on [control=['for'], data=['entry_point']] return installed_classes
def _bootstrap_arch(name, **kwargs): ''' Bootstrap an Arch Linux container ''' if not salt.utils.path.which('pacstrap'): raise CommandExecutionError( 'pacstrap not found, is the arch-install-scripts package ' 'installed?' ) dst = _make_container_root(name) cmd = 'pacstrap -c -d {0} base'.format(dst) ret = __salt__['cmd.run_all'](cmd, python_shell=False) if ret['retcode'] != 0: _build_failed(dst, name) return ret
def function[_bootstrap_arch, parameter[name]]: constant[ Bootstrap an Arch Linux container ] if <ast.UnaryOp object at 0x7da207f03ee0> begin[:] <ast.Raise object at 0x7da207f038b0> variable[dst] assign[=] call[name[_make_container_root], parameter[name[name]]] variable[cmd] assign[=] call[constant[pacstrap -c -d {0} base].format, parameter[name[dst]]] variable[ret] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]] if compare[call[name[ret]][constant[retcode]] not_equal[!=] constant[0]] begin[:] call[name[_build_failed], parameter[name[dst], name[name]]] return[name[ret]]
keyword[def] identifier[_bootstrap_arch] ( identifier[name] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[path] . identifier[which] ( literal[string] ): keyword[raise] identifier[CommandExecutionError] ( literal[string] literal[string] ) identifier[dst] = identifier[_make_container_root] ( identifier[name] ) identifier[cmd] = literal[string] . identifier[format] ( identifier[dst] ) identifier[ret] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] ) keyword[if] identifier[ret] [ literal[string] ]!= literal[int] : identifier[_build_failed] ( identifier[dst] , identifier[name] ) keyword[return] identifier[ret]
def _bootstrap_arch(name, **kwargs): """ Bootstrap an Arch Linux container """ if not salt.utils.path.which('pacstrap'): raise CommandExecutionError('pacstrap not found, is the arch-install-scripts package installed?') # depends on [control=['if'], data=[]] dst = _make_container_root(name) cmd = 'pacstrap -c -d {0} base'.format(dst) ret = __salt__['cmd.run_all'](cmd, python_shell=False) if ret['retcode'] != 0: _build_failed(dst, name) # depends on [control=['if'], data=[]] return ret
def try_pull_image_from_registry(self, image_name, image_tag): """ Tries to pull a image with the tag ``image_tag`` from registry set by ``use_registry_name``. After the image is pulled, it's tagged with ``image_name``:``image_tag`` so lookup can be made locally next time. :return: A :class:`Image <docker.models.images.Image>` instance if the image exists, ``None`` otherwise. :rtype: Optional[docker.models.images.Image] """ try: image = self.client.images.pull(self.use_registry_name, image_tag) except (docker.errors.ImageNotFound, docker.errors.NotFound): # the image doesn't exist logger.info("Tried to pull %s:%s from a registry, not found", self.use_registry_name, image_tag) return None logger.info("Pulled %s:%s from registry, tagged %s:%s", self.use_registry_name, image_tag, image_name, image_tag) # the name and tag are different on the repo, let's tag it with local name so exists checks run smoothly image.tag(image_name, image_tag) return image
def function[try_pull_image_from_registry, parameter[self, image_name, image_tag]]: constant[ Tries to pull a image with the tag ``image_tag`` from registry set by ``use_registry_name``. After the image is pulled, it's tagged with ``image_name``:``image_tag`` so lookup can be made locally next time. :return: A :class:`Image <docker.models.images.Image>` instance if the image exists, ``None`` otherwise. :rtype: Optional[docker.models.images.Image] ] <ast.Try object at 0x7da20e9b1360> call[name[logger].info, parameter[constant[Pulled %s:%s from registry, tagged %s:%s], name[self].use_registry_name, name[image_tag], name[image_name], name[image_tag]]] call[name[image].tag, parameter[name[image_name], name[image_tag]]] return[name[image]]
keyword[def] identifier[try_pull_image_from_registry] ( identifier[self] , identifier[image_name] , identifier[image_tag] ): literal[string] keyword[try] : identifier[image] = identifier[self] . identifier[client] . identifier[images] . identifier[pull] ( identifier[self] . identifier[use_registry_name] , identifier[image_tag] ) keyword[except] ( identifier[docker] . identifier[errors] . identifier[ImageNotFound] , identifier[docker] . identifier[errors] . identifier[NotFound] ): identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[use_registry_name] , identifier[image_tag] ) keyword[return] keyword[None] identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[use_registry_name] , identifier[image_tag] , identifier[image_name] , identifier[image_tag] ) identifier[image] . identifier[tag] ( identifier[image_name] , identifier[image_tag] ) keyword[return] identifier[image]
def try_pull_image_from_registry(self, image_name, image_tag): """ Tries to pull a image with the tag ``image_tag`` from registry set by ``use_registry_name``. After the image is pulled, it's tagged with ``image_name``:``image_tag`` so lookup can be made locally next time. :return: A :class:`Image <docker.models.images.Image>` instance if the image exists, ``None`` otherwise. :rtype: Optional[docker.models.images.Image] """ try: image = self.client.images.pull(self.use_registry_name, image_tag) # depends on [control=['try'], data=[]] except (docker.errors.ImageNotFound, docker.errors.NotFound): # the image doesn't exist logger.info('Tried to pull %s:%s from a registry, not found', self.use_registry_name, image_tag) return None # depends on [control=['except'], data=[]] logger.info('Pulled %s:%s from registry, tagged %s:%s', self.use_registry_name, image_tag, image_name, image_tag) # the name and tag are different on the repo, let's tag it with local name so exists checks run smoothly image.tag(image_name, image_tag) return image
def fact(name, puppet=False): ''' Run facter for a specific fact CLI Example: .. code-block:: bash salt '*' puppet.fact kernel ''' opt_puppet = '--puppet' if puppet else '' ret = __salt__['cmd.run_all']( 'facter {0} {1}'.format(opt_puppet, name), python_shell=False) if ret['retcode'] != 0: raise CommandExecutionError(ret['stderr']) if not ret['stdout']: return '' return ret['stdout']
def function[fact, parameter[name, puppet]]: constant[ Run facter for a specific fact CLI Example: .. code-block:: bash salt '*' puppet.fact kernel ] variable[opt_puppet] assign[=] <ast.IfExp object at 0x7da18f00caf0> variable[ret] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[constant[facter {0} {1}].format, parameter[name[opt_puppet], name[name]]]]] if compare[call[name[ret]][constant[retcode]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da18f00d390> if <ast.UnaryOp object at 0x7da18f00c4c0> begin[:] return[constant[]] return[call[name[ret]][constant[stdout]]]
keyword[def] identifier[fact] ( identifier[name] , identifier[puppet] = keyword[False] ): literal[string] identifier[opt_puppet] = literal[string] keyword[if] identifier[puppet] keyword[else] literal[string] identifier[ret] = identifier[__salt__] [ literal[string] ]( literal[string] . identifier[format] ( identifier[opt_puppet] , identifier[name] ), identifier[python_shell] = keyword[False] ) keyword[if] identifier[ret] [ literal[string] ]!= literal[int] : keyword[raise] identifier[CommandExecutionError] ( identifier[ret] [ literal[string] ]) keyword[if] keyword[not] identifier[ret] [ literal[string] ]: keyword[return] literal[string] keyword[return] identifier[ret] [ literal[string] ]
def fact(name, puppet=False): """ Run facter for a specific fact CLI Example: .. code-block:: bash salt '*' puppet.fact kernel """ opt_puppet = '--puppet' if puppet else '' ret = __salt__['cmd.run_all']('facter {0} {1}'.format(opt_puppet, name), python_shell=False) if ret['retcode'] != 0: raise CommandExecutionError(ret['stderr']) # depends on [control=['if'], data=[]] if not ret['stdout']: return '' # depends on [control=['if'], data=[]] return ret['stdout']
def set_options(self, **options): """ Set instance variables based on an options dict """ self.interactive = options['interactive'] self.verbosity = options['verbosity'] self.symlink = options['link'] self.clear = options['clear'] self.dry_run = options['dry_run'] ignore_patterns = options['ignore_patterns'] if options['use_default_ignore_patterns']: ignore_patterns += ['CVS', '.*', '*~'] self.ignore_patterns = list(set(ignore_patterns)) self.post_process = options['post_process']
def function[set_options, parameter[self]]: constant[ Set instance variables based on an options dict ] name[self].interactive assign[=] call[name[options]][constant[interactive]] name[self].verbosity assign[=] call[name[options]][constant[verbosity]] name[self].symlink assign[=] call[name[options]][constant[link]] name[self].clear assign[=] call[name[options]][constant[clear]] name[self].dry_run assign[=] call[name[options]][constant[dry_run]] variable[ignore_patterns] assign[=] call[name[options]][constant[ignore_patterns]] if call[name[options]][constant[use_default_ignore_patterns]] begin[:] <ast.AugAssign object at 0x7da1aff1f3a0> name[self].ignore_patterns assign[=] call[name[list], parameter[call[name[set], parameter[name[ignore_patterns]]]]] name[self].post_process assign[=] call[name[options]][constant[post_process]]
keyword[def] identifier[set_options] ( identifier[self] ,** identifier[options] ): literal[string] identifier[self] . identifier[interactive] = identifier[options] [ literal[string] ] identifier[self] . identifier[verbosity] = identifier[options] [ literal[string] ] identifier[self] . identifier[symlink] = identifier[options] [ literal[string] ] identifier[self] . identifier[clear] = identifier[options] [ literal[string] ] identifier[self] . identifier[dry_run] = identifier[options] [ literal[string] ] identifier[ignore_patterns] = identifier[options] [ literal[string] ] keyword[if] identifier[options] [ literal[string] ]: identifier[ignore_patterns] +=[ literal[string] , literal[string] , literal[string] ] identifier[self] . identifier[ignore_patterns] = identifier[list] ( identifier[set] ( identifier[ignore_patterns] )) identifier[self] . identifier[post_process] = identifier[options] [ literal[string] ]
def set_options(self, **options): """ Set instance variables based on an options dict """ self.interactive = options['interactive'] self.verbosity = options['verbosity'] self.symlink = options['link'] self.clear = options['clear'] self.dry_run = options['dry_run'] ignore_patterns = options['ignore_patterns'] if options['use_default_ignore_patterns']: ignore_patterns += ['CVS', '.*', '*~'] # depends on [control=['if'], data=[]] self.ignore_patterns = list(set(ignore_patterns)) self.post_process = options['post_process']
def addonModules(cls, recurse=True): """ Returns all the modules that this addon class uses to load plugins from. :param recurse | <bool> :return [<str> || <module>, ..] """ prop = '_{0}__addon_modules'.format(cls.__name__) out = set() # lookup base classes if recurse: for base in cls.__bases__: if issubclass(base, AddonManager): out.update(base.addonModules(recurse)) # always use the highest level for any given key out.update(getattr(cls, prop, set())) return out
def function[addonModules, parameter[cls, recurse]]: constant[ Returns all the modules that this addon class uses to load plugins from. :param recurse | <bool> :return [<str> || <module>, ..] ] variable[prop] assign[=] call[constant[_{0}__addon_modules].format, parameter[name[cls].__name__]] variable[out] assign[=] call[name[set], parameter[]] if name[recurse] begin[:] for taget[name[base]] in starred[name[cls].__bases__] begin[:] if call[name[issubclass], parameter[name[base], name[AddonManager]]] begin[:] call[name[out].update, parameter[call[name[base].addonModules, parameter[name[recurse]]]]] call[name[out].update, parameter[call[name[getattr], parameter[name[cls], name[prop], call[name[set], parameter[]]]]]] return[name[out]]
keyword[def] identifier[addonModules] ( identifier[cls] , identifier[recurse] = keyword[True] ): literal[string] identifier[prop] = literal[string] . identifier[format] ( identifier[cls] . identifier[__name__] ) identifier[out] = identifier[set] () keyword[if] identifier[recurse] : keyword[for] identifier[base] keyword[in] identifier[cls] . identifier[__bases__] : keyword[if] identifier[issubclass] ( identifier[base] , identifier[AddonManager] ): identifier[out] . identifier[update] ( identifier[base] . identifier[addonModules] ( identifier[recurse] )) identifier[out] . identifier[update] ( identifier[getattr] ( identifier[cls] , identifier[prop] , identifier[set] ())) keyword[return] identifier[out]
def addonModules(cls, recurse=True): """ Returns all the modules that this addon class uses to load plugins from. :param recurse | <bool> :return [<str> || <module>, ..] """ prop = '_{0}__addon_modules'.format(cls.__name__) out = set() # lookup base classes if recurse: for base in cls.__bases__: if issubclass(base, AddonManager): out.update(base.addonModules(recurse)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['base']] # depends on [control=['if'], data=[]] # always use the highest level for any given key out.update(getattr(cls, prop, set())) return out
def addAsn1MibSource(self, *asn1Sources, **kwargs): """Adds path to a repository to search ASN.1 MIB files. Parameters ---------- *asn1Sources : one or more URL in form of :py:obj:`str` identifying local or remote ASN.1 MIB repositories. Path must include the *@mib@* component which will be replaced with MIB module name at the time of search. Returns ------- : :py:class:`~pysnmp.smi.rfc1902.ObjectIdentity` reference to itself Notes ----- Please refer to :py:class:`~pysmi.reader.localfile.FileReader`, :py:class:`~pysmi.reader.httpclient.HttpReader` and :py:class:`~pysmi.reader.ftpclient.FtpReader` classes for in-depth information on ASN.1 MIB lookup. Examples -------- >>> ObjectIdentity('SNMPv2-MIB', 'sysDescr').addAsn1Source('http://mibs.snmplabs.com/asn1/@mib@') ObjectIdentity('SNMPv2-MIB', 'sysDescr') >>> """ if self._asn1SourcesToAdd is None: self._asn1SourcesToAdd = asn1Sources else: self._asn1SourcesToAdd += asn1Sources if self._asn1SourcesOptions: self._asn1SourcesOptions.update(kwargs) else: self._asn1SourcesOptions = kwargs return self
def function[addAsn1MibSource, parameter[self]]: constant[Adds path to a repository to search ASN.1 MIB files. Parameters ---------- *asn1Sources : one or more URL in form of :py:obj:`str` identifying local or remote ASN.1 MIB repositories. Path must include the *@mib@* component which will be replaced with MIB module name at the time of search. Returns ------- : :py:class:`~pysnmp.smi.rfc1902.ObjectIdentity` reference to itself Notes ----- Please refer to :py:class:`~pysmi.reader.localfile.FileReader`, :py:class:`~pysmi.reader.httpclient.HttpReader` and :py:class:`~pysmi.reader.ftpclient.FtpReader` classes for in-depth information on ASN.1 MIB lookup. Examples -------- >>> ObjectIdentity('SNMPv2-MIB', 'sysDescr').addAsn1Source('http://mibs.snmplabs.com/asn1/@mib@') ObjectIdentity('SNMPv2-MIB', 'sysDescr') >>> ] if compare[name[self]._asn1SourcesToAdd is constant[None]] begin[:] name[self]._asn1SourcesToAdd assign[=] name[asn1Sources] if name[self]._asn1SourcesOptions begin[:] call[name[self]._asn1SourcesOptions.update, parameter[name[kwargs]]] return[name[self]]
keyword[def] identifier[addAsn1MibSource] ( identifier[self] ,* identifier[asn1Sources] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[self] . identifier[_asn1SourcesToAdd] keyword[is] keyword[None] : identifier[self] . identifier[_asn1SourcesToAdd] = identifier[asn1Sources] keyword[else] : identifier[self] . identifier[_asn1SourcesToAdd] += identifier[asn1Sources] keyword[if] identifier[self] . identifier[_asn1SourcesOptions] : identifier[self] . identifier[_asn1SourcesOptions] . identifier[update] ( identifier[kwargs] ) keyword[else] : identifier[self] . identifier[_asn1SourcesOptions] = identifier[kwargs] keyword[return] identifier[self]
def addAsn1MibSource(self, *asn1Sources, **kwargs): """Adds path to a repository to search ASN.1 MIB files. Parameters ---------- *asn1Sources : one or more URL in form of :py:obj:`str` identifying local or remote ASN.1 MIB repositories. Path must include the *@mib@* component which will be replaced with MIB module name at the time of search. Returns ------- : :py:class:`~pysnmp.smi.rfc1902.ObjectIdentity` reference to itself Notes ----- Please refer to :py:class:`~pysmi.reader.localfile.FileReader`, :py:class:`~pysmi.reader.httpclient.HttpReader` and :py:class:`~pysmi.reader.ftpclient.FtpReader` classes for in-depth information on ASN.1 MIB lookup. Examples -------- >>> ObjectIdentity('SNMPv2-MIB', 'sysDescr').addAsn1Source('http://mibs.snmplabs.com/asn1/@mib@') ObjectIdentity('SNMPv2-MIB', 'sysDescr') >>> """ if self._asn1SourcesToAdd is None: self._asn1SourcesToAdd = asn1Sources # depends on [control=['if'], data=[]] else: self._asn1SourcesToAdd += asn1Sources if self._asn1SourcesOptions: self._asn1SourcesOptions.update(kwargs) # depends on [control=['if'], data=[]] else: self._asn1SourcesOptions = kwargs return self
def cipheringModeCommand(): """CIPHERING MODE COMMAND Section 9.1.9""" a = TpPd(pd=0x6) b = MessageType(mesType=0x35) # 00110101 c = RrCause() #d=cipherModeSetting() #e=cipherResponse() # FIX d = CipherModeSettingAndcipherResponse() packet = a / b / c / d return packet
def function[cipheringModeCommand, parameter[]]: constant[CIPHERING MODE COMMAND Section 9.1.9] variable[a] assign[=] call[name[TpPd], parameter[]] variable[b] assign[=] call[name[MessageType], parameter[]] variable[c] assign[=] call[name[RrCause], parameter[]] variable[d] assign[=] call[name[CipherModeSettingAndcipherResponse], parameter[]] variable[packet] assign[=] binary_operation[binary_operation[binary_operation[name[a] / name[b]] / name[c]] / name[d]] return[name[packet]]
keyword[def] identifier[cipheringModeCommand] (): literal[string] identifier[a] = identifier[TpPd] ( identifier[pd] = literal[int] ) identifier[b] = identifier[MessageType] ( identifier[mesType] = literal[int] ) identifier[c] = identifier[RrCause] () identifier[d] = identifier[CipherModeSettingAndcipherResponse] () identifier[packet] = identifier[a] / identifier[b] / identifier[c] / identifier[d] keyword[return] identifier[packet]
def cipheringModeCommand(): """CIPHERING MODE COMMAND Section 9.1.9""" a = TpPd(pd=6) b = MessageType(mesType=53) # 00110101 c = RrCause() #d=cipherModeSetting() #e=cipherResponse() # FIX d = CipherModeSettingAndcipherResponse() packet = a / b / c / d return packet
def dump_next(self): """Dump the next reading from the stream. Returns: IOTileReading: The next reading or None if there isn't one """ if self.dump_walker is None: return pack_error(ControllerSubsystem.SENSOR_LOG, SensorLogError.STREAM_WALKER_NOT_INITIALIZED) try: return self.dump_walker.pop() except StreamEmptyError: return None
def function[dump_next, parameter[self]]: constant[Dump the next reading from the stream. Returns: IOTileReading: The next reading or None if there isn't one ] if compare[name[self].dump_walker is constant[None]] begin[:] return[call[name[pack_error], parameter[name[ControllerSubsystem].SENSOR_LOG, name[SensorLogError].STREAM_WALKER_NOT_INITIALIZED]]] <ast.Try object at 0x7da2046216f0>
keyword[def] identifier[dump_next] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[dump_walker] keyword[is] keyword[None] : keyword[return] identifier[pack_error] ( identifier[ControllerSubsystem] . identifier[SENSOR_LOG] , identifier[SensorLogError] . identifier[STREAM_WALKER_NOT_INITIALIZED] ) keyword[try] : keyword[return] identifier[self] . identifier[dump_walker] . identifier[pop] () keyword[except] identifier[StreamEmptyError] : keyword[return] keyword[None]
def dump_next(self): """Dump the next reading from the stream. Returns: IOTileReading: The next reading or None if there isn't one """ if self.dump_walker is None: return pack_error(ControllerSubsystem.SENSOR_LOG, SensorLogError.STREAM_WALKER_NOT_INITIALIZED) # depends on [control=['if'], data=[]] try: return self.dump_walker.pop() # depends on [control=['try'], data=[]] except StreamEmptyError: return None # depends on [control=['except'], data=[]]
def get_msg_display_string(self, msgid): """Generates a user-consumable representation of a message. Can be just the message ID or the ID and the symbol. """ message_definitions = self.get_message_definitions(msgid) if len(message_definitions) == 1: return repr(message_definitions[0].symbol) return repr([md.symbol for md in message_definitions])
def function[get_msg_display_string, parameter[self, msgid]]: constant[Generates a user-consumable representation of a message. Can be just the message ID or the ID and the symbol. ] variable[message_definitions] assign[=] call[name[self].get_message_definitions, parameter[name[msgid]]] if compare[call[name[len], parameter[name[message_definitions]]] equal[==] constant[1]] begin[:] return[call[name[repr], parameter[call[name[message_definitions]][constant[0]].symbol]]] return[call[name[repr], parameter[<ast.ListComp object at 0x7da1b024dff0>]]]
keyword[def] identifier[get_msg_display_string] ( identifier[self] , identifier[msgid] ): literal[string] identifier[message_definitions] = identifier[self] . identifier[get_message_definitions] ( identifier[msgid] ) keyword[if] identifier[len] ( identifier[message_definitions] )== literal[int] : keyword[return] identifier[repr] ( identifier[message_definitions] [ literal[int] ]. identifier[symbol] ) keyword[return] identifier[repr] ([ identifier[md] . identifier[symbol] keyword[for] identifier[md] keyword[in] identifier[message_definitions] ])
def get_msg_display_string(self, msgid): """Generates a user-consumable representation of a message. Can be just the message ID or the ID and the symbol. """ message_definitions = self.get_message_definitions(msgid) if len(message_definitions) == 1: return repr(message_definitions[0].symbol) # depends on [control=['if'], data=[]] return repr([md.symbol for md in message_definitions])
def mkfs(*devices, **kwargs): ''' Create a file system on the specified device. By default wipes out with force. General options: * **allocsize**: Specify the BTRFS offset from the start of the device. * **bytecount**: Specify the size of the resultant filesystem. * **nodesize**: Node size. * **leafsize**: Specify the nodesize, the tree block size in which btrfs stores data. * **noforce**: Prevent force overwrite when an existing filesystem is detected on the device. * **sectorsize**: Specify the sectorsize, the minimum data block allocation unit. * **nodiscard**: Do not perform whole device TRIM operation by default. * **uuid**: Pass UUID or pass True to generate one. Options: * **dto**: (raid0|raid1|raid5|raid6|raid10|single|dup) Specify how the data must be spanned across the devices specified. * **mto**: (raid0|raid1|raid5|raid6|raid10|single|dup) Specify how metadata must be spanned across the devices specified. * **fts**: Features (call ``salt <host> btrfs.features`` for full list of available features) See the ``mkfs.btrfs(8)`` manpage for a more complete description of corresponding options description. CLI Example: .. code-block:: bash salt '*' btrfs.mkfs /dev/sda1 salt '*' btrfs.mkfs /dev/sda1 noforce=True ''' if not devices: raise CommandExecutionError("No devices specified") mounts = salt.utils.fsutils._get_mounts("btrfs") for device in devices: if mounts.get(device): raise CommandExecutionError("Device \"{0}\" should not be mounted".format(device)) cmd = ["mkfs.btrfs"] dto = kwargs.get("dto") mto = kwargs.get("mto") if len(devices) == 1: if dto: cmd.append("-d single") if mto: cmd.append("-m single") else: if dto: cmd.append("-d {0}".format(dto)) if mto: cmd.append("-m {0}".format(mto)) for key, option in [("-l", "leafsize"), ("-L", "label"), ("-O", "fts"), ("-A", "allocsize"), ("-b", "bytecount"), ("-n", "nodesize"), ("-s", "sectorsize")]: if option == 'label' and option in kwargs: kwargs['label'] = "'{0}'".format(kwargs["label"]) if kwargs.get(option): cmd.append("{0} {1}".format(key, kwargs.get(option))) if kwargs.get("uuid"): cmd.append("-U {0}".format(kwargs.get("uuid") is True and uuid.uuid1() or kwargs.get("uuid"))) if kwargs.get("nodiscard"): cmd.append("-K") if not kwargs.get("noforce"): cmd.append("-f") cmd.extend(devices) out = __salt__['cmd.run_all'](' '.join(cmd)) salt.utils.fsutils._verify_run(out) ret = {'log': out['stdout']} ret.update(__salt__['btrfs.info'](devices[0])) return ret
def function[mkfs, parameter[]]: constant[ Create a file system on the specified device. By default wipes out with force. General options: * **allocsize**: Specify the BTRFS offset from the start of the device. * **bytecount**: Specify the size of the resultant filesystem. * **nodesize**: Node size. * **leafsize**: Specify the nodesize, the tree block size in which btrfs stores data. * **noforce**: Prevent force overwrite when an existing filesystem is detected on the device. * **sectorsize**: Specify the sectorsize, the minimum data block allocation unit. * **nodiscard**: Do not perform whole device TRIM operation by default. * **uuid**: Pass UUID or pass True to generate one. Options: * **dto**: (raid0|raid1|raid5|raid6|raid10|single|dup) Specify how the data must be spanned across the devices specified. * **mto**: (raid0|raid1|raid5|raid6|raid10|single|dup) Specify how metadata must be spanned across the devices specified. * **fts**: Features (call ``salt <host> btrfs.features`` for full list of available features) See the ``mkfs.btrfs(8)`` manpage for a more complete description of corresponding options description. CLI Example: .. code-block:: bash salt '*' btrfs.mkfs /dev/sda1 salt '*' btrfs.mkfs /dev/sda1 noforce=True ] if <ast.UnaryOp object at 0x7da1b1f94100> begin[:] <ast.Raise object at 0x7da1b1f96230> variable[mounts] assign[=] call[name[salt].utils.fsutils._get_mounts, parameter[constant[btrfs]]] for taget[name[device]] in starred[name[devices]] begin[:] if call[name[mounts].get, parameter[name[device]]] begin[:] <ast.Raise object at 0x7da1b1f956f0> variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b1f94310>]] variable[dto] assign[=] call[name[kwargs].get, parameter[constant[dto]]] variable[mto] assign[=] call[name[kwargs].get, parameter[constant[mto]]] if compare[call[name[len], parameter[name[devices]]] equal[==] constant[1]] begin[:] if name[dto] begin[:] call[name[cmd].append, parameter[constant[-d single]]] if name[mto] begin[:] call[name[cmd].append, parameter[constant[-m single]]] for taget[tuple[[<ast.Name object at 0x7da1b1f96ef0>, <ast.Name object at 0x7da1b1f968f0>]]] in starred[list[[<ast.Tuple object at 0x7da1b1f97880>, <ast.Tuple object at 0x7da1b1f96440>, <ast.Tuple object at 0x7da1b1f94070>, <ast.Tuple object at 0x7da1b1f96470>, <ast.Tuple object at 0x7da1b1f97a30>, <ast.Tuple object at 0x7da1b1f97dc0>, <ast.Tuple object at 0x7da1b1f97160>]]] begin[:] if <ast.BoolOp object at 0x7da1b1f97460> begin[:] call[name[kwargs]][constant[label]] assign[=] call[constant['{0}'].format, parameter[call[name[kwargs]][constant[label]]]] if call[name[kwargs].get, parameter[name[option]]] begin[:] call[name[cmd].append, parameter[call[constant[{0} {1}].format, parameter[name[key], call[name[kwargs].get, parameter[name[option]]]]]]] if call[name[kwargs].get, parameter[constant[uuid]]] begin[:] call[name[cmd].append, parameter[call[constant[-U {0}].format, parameter[<ast.BoolOp object at 0x7da1b1f94340>]]]] if call[name[kwargs].get, parameter[constant[nodiscard]]] begin[:] call[name[cmd].append, parameter[constant[-K]]] if <ast.UnaryOp object at 0x7da1b1f95660> begin[:] call[name[cmd].append, parameter[constant[-f]]] call[name[cmd].extend, parameter[name[devices]]] variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[constant[ ].join, parameter[name[cmd]]]]] call[name[salt].utils.fsutils._verify_run, parameter[name[out]]] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f944c0>], [<ast.Subscript object at 0x7da1b1f96410>]] call[name[ret].update, parameter[call[call[name[__salt__]][constant[btrfs.info]], parameter[call[name[devices]][constant[0]]]]]] return[name[ret]]
keyword[def] identifier[mkfs] (* identifier[devices] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[devices] : keyword[raise] identifier[CommandExecutionError] ( literal[string] ) identifier[mounts] = identifier[salt] . identifier[utils] . identifier[fsutils] . identifier[_get_mounts] ( literal[string] ) keyword[for] identifier[device] keyword[in] identifier[devices] : keyword[if] identifier[mounts] . identifier[get] ( identifier[device] ): keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[device] )) identifier[cmd] =[ literal[string] ] identifier[dto] = identifier[kwargs] . identifier[get] ( literal[string] ) identifier[mto] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[if] identifier[len] ( identifier[devices] )== literal[int] : keyword[if] identifier[dto] : identifier[cmd] . identifier[append] ( literal[string] ) keyword[if] identifier[mto] : identifier[cmd] . identifier[append] ( literal[string] ) keyword[else] : keyword[if] identifier[dto] : identifier[cmd] . identifier[append] ( literal[string] . identifier[format] ( identifier[dto] )) keyword[if] identifier[mto] : identifier[cmd] . identifier[append] ( literal[string] . identifier[format] ( identifier[mto] )) keyword[for] identifier[key] , identifier[option] keyword[in] [( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] ), ( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] ), ( literal[string] , literal[string] )]: keyword[if] identifier[option] == literal[string] keyword[and] identifier[option] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]= literal[string] . identifier[format] ( identifier[kwargs] [ literal[string] ]) keyword[if] identifier[kwargs] . identifier[get] ( identifier[option] ): identifier[cmd] . identifier[append] ( literal[string] . identifier[format] ( identifier[key] , identifier[kwargs] . identifier[get] ( identifier[option] ))) keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): identifier[cmd] . identifier[append] ( literal[string] . identifier[format] ( identifier[kwargs] . identifier[get] ( literal[string] ) keyword[is] keyword[True] keyword[and] identifier[uuid] . identifier[uuid1] () keyword[or] identifier[kwargs] . identifier[get] ( literal[string] ))) keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): identifier[cmd] . identifier[append] ( literal[string] ) keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] ): identifier[cmd] . identifier[append] ( literal[string] ) identifier[cmd] . identifier[extend] ( identifier[devices] ) identifier[out] = identifier[__salt__] [ literal[string] ]( literal[string] . identifier[join] ( identifier[cmd] )) identifier[salt] . identifier[utils] . identifier[fsutils] . identifier[_verify_run] ( identifier[out] ) identifier[ret] ={ literal[string] : identifier[out] [ literal[string] ]} identifier[ret] . identifier[update] ( identifier[__salt__] [ literal[string] ]( identifier[devices] [ literal[int] ])) keyword[return] identifier[ret]
def mkfs(*devices, **kwargs): """ Create a file system on the specified device. By default wipes out with force. General options: * **allocsize**: Specify the BTRFS offset from the start of the device. * **bytecount**: Specify the size of the resultant filesystem. * **nodesize**: Node size. * **leafsize**: Specify the nodesize, the tree block size in which btrfs stores data. * **noforce**: Prevent force overwrite when an existing filesystem is detected on the device. * **sectorsize**: Specify the sectorsize, the minimum data block allocation unit. * **nodiscard**: Do not perform whole device TRIM operation by default. * **uuid**: Pass UUID or pass True to generate one. Options: * **dto**: (raid0|raid1|raid5|raid6|raid10|single|dup) Specify how the data must be spanned across the devices specified. * **mto**: (raid0|raid1|raid5|raid6|raid10|single|dup) Specify how metadata must be spanned across the devices specified. * **fts**: Features (call ``salt <host> btrfs.features`` for full list of available features) See the ``mkfs.btrfs(8)`` manpage for a more complete description of corresponding options description. CLI Example: .. code-block:: bash salt '*' btrfs.mkfs /dev/sda1 salt '*' btrfs.mkfs /dev/sda1 noforce=True """ if not devices: raise CommandExecutionError('No devices specified') # depends on [control=['if'], data=[]] mounts = salt.utils.fsutils._get_mounts('btrfs') for device in devices: if mounts.get(device): raise CommandExecutionError('Device "{0}" should not be mounted'.format(device)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['device']] cmd = ['mkfs.btrfs'] dto = kwargs.get('dto') mto = kwargs.get('mto') if len(devices) == 1: if dto: cmd.append('-d single') # depends on [control=['if'], data=[]] if mto: cmd.append('-m single') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: if dto: cmd.append('-d {0}'.format(dto)) # depends on [control=['if'], data=[]] if mto: cmd.append('-m {0}'.format(mto)) # depends on [control=['if'], data=[]] for (key, option) in [('-l', 'leafsize'), ('-L', 'label'), ('-O', 'fts'), ('-A', 'allocsize'), ('-b', 'bytecount'), ('-n', 'nodesize'), ('-s', 'sectorsize')]: if option == 'label' and option in kwargs: kwargs['label'] = "'{0}'".format(kwargs['label']) # depends on [control=['if'], data=[]] if kwargs.get(option): cmd.append('{0} {1}'.format(key, kwargs.get(option))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if kwargs.get('uuid'): cmd.append('-U {0}'.format(kwargs.get('uuid') is True and uuid.uuid1() or kwargs.get('uuid'))) # depends on [control=['if'], data=[]] if kwargs.get('nodiscard'): cmd.append('-K') # depends on [control=['if'], data=[]] if not kwargs.get('noforce'): cmd.append('-f') # depends on [control=['if'], data=[]] cmd.extend(devices) out = __salt__['cmd.run_all'](' '.join(cmd)) salt.utils.fsutils._verify_run(out) ret = {'log': out['stdout']} ret.update(__salt__['btrfs.info'](devices[0])) return ret
def mac(): """ Get MAC. """ from uuid import getnode as get_mac return ':'.join(("%012x" % get_mac())[i:i+2] for i in range(0, 12, 2))
def function[mac, parameter[]]: constant[ Get MAC. ] from relative_module[uuid] import module[getnode] return[call[constant[:].join, parameter[<ast.GeneratorExp object at 0x7da1b02e5f90>]]]
keyword[def] identifier[mac] (): literal[string] keyword[from] identifier[uuid] keyword[import] identifier[getnode] keyword[as] identifier[get_mac] keyword[return] literal[string] . identifier[join] (( literal[string] % identifier[get_mac] ())[ identifier[i] : identifier[i] + literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] , literal[int] ))
def mac(): """ Get MAC. """ from uuid import getnode as get_mac return ':'.join((('%012x' % get_mac())[i:i + 2] for i in range(0, 12, 2)))
def remove_namespace(self, namespace): """This removes a Namespace object from the socket. This is usually called by :meth:`~socketio.namespace.BaseNamespace.disconnect`. """ if namespace in self.active_ns: del self.active_ns[namespace] if len(self.active_ns) == 0 and self.connected: self.kill(detach=True)
def function[remove_namespace, parameter[self, namespace]]: constant[This removes a Namespace object from the socket. This is usually called by :meth:`~socketio.namespace.BaseNamespace.disconnect`. ] if compare[name[namespace] in name[self].active_ns] begin[:] <ast.Delete object at 0x7da18f09d8a0> if <ast.BoolOp object at 0x7da18f09fdc0> begin[:] call[name[self].kill, parameter[]]
keyword[def] identifier[remove_namespace] ( identifier[self] , identifier[namespace] ): literal[string] keyword[if] identifier[namespace] keyword[in] identifier[self] . identifier[active_ns] : keyword[del] identifier[self] . identifier[active_ns] [ identifier[namespace] ] keyword[if] identifier[len] ( identifier[self] . identifier[active_ns] )== literal[int] keyword[and] identifier[self] . identifier[connected] : identifier[self] . identifier[kill] ( identifier[detach] = keyword[True] )
def remove_namespace(self, namespace): """This removes a Namespace object from the socket. This is usually called by :meth:`~socketio.namespace.BaseNamespace.disconnect`. """ if namespace in self.active_ns: del self.active_ns[namespace] # depends on [control=['if'], data=['namespace']] if len(self.active_ns) == 0 and self.connected: self.kill(detach=True) # depends on [control=['if'], data=[]]
def dumps(obj): """ Serializes a dictionary into Manifest data. :param obj: A dictionary to serialize. :return: A file object. """ if not isinstance(obj, dict): raise TypeError('can only dump a dictionary as a Manifest but got ' + type(obj).__name__) data = [] int32 = struct.Struct('<I') for message_name in ('payload', 'metadata', 'signature'): message_data = obj[message_name] message_id = MSG_IDS[message_name] message_class = MessageClass[message_id] message = dict_to_protobuf(message_class, message_data) message_bytes = message.SerializeToString() message_size = len(message_bytes) data.append(int32.pack(message_id)) data.append(int32.pack(message_size)) data.append(message_bytes) # MSG_EOF marks the end of messages. data.append(int32.pack(MSG_EOF)) return b''.join(data)
def function[dumps, parameter[obj]]: constant[ Serializes a dictionary into Manifest data. :param obj: A dictionary to serialize. :return: A file object. ] if <ast.UnaryOp object at 0x7da20cabf730> begin[:] <ast.Raise object at 0x7da20cabd690> variable[data] assign[=] list[[]] variable[int32] assign[=] call[name[struct].Struct, parameter[constant[<I]]] for taget[name[message_name]] in starred[tuple[[<ast.Constant object at 0x7da20cabf790>, <ast.Constant object at 0x7da20cabc700>, <ast.Constant object at 0x7da20cabe920>]]] begin[:] variable[message_data] assign[=] call[name[obj]][name[message_name]] variable[message_id] assign[=] call[name[MSG_IDS]][name[message_name]] variable[message_class] assign[=] call[name[MessageClass]][name[message_id]] variable[message] assign[=] call[name[dict_to_protobuf], parameter[name[message_class], name[message_data]]] variable[message_bytes] assign[=] call[name[message].SerializeToString, parameter[]] variable[message_size] assign[=] call[name[len], parameter[name[message_bytes]]] call[name[data].append, parameter[call[name[int32].pack, parameter[name[message_id]]]]] call[name[data].append, parameter[call[name[int32].pack, parameter[name[message_size]]]]] call[name[data].append, parameter[name[message_bytes]]] call[name[data].append, parameter[call[name[int32].pack, parameter[name[MSG_EOF]]]]] return[call[constant[b''].join, parameter[name[data]]]]
keyword[def] identifier[dumps] ( identifier[obj] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[obj] , identifier[dict] ): keyword[raise] identifier[TypeError] ( literal[string] + identifier[type] ( identifier[obj] ). identifier[__name__] ) identifier[data] =[] identifier[int32] = identifier[struct] . identifier[Struct] ( literal[string] ) keyword[for] identifier[message_name] keyword[in] ( literal[string] , literal[string] , literal[string] ): identifier[message_data] = identifier[obj] [ identifier[message_name] ] identifier[message_id] = identifier[MSG_IDS] [ identifier[message_name] ] identifier[message_class] = identifier[MessageClass] [ identifier[message_id] ] identifier[message] = identifier[dict_to_protobuf] ( identifier[message_class] , identifier[message_data] ) identifier[message_bytes] = identifier[message] . identifier[SerializeToString] () identifier[message_size] = identifier[len] ( identifier[message_bytes] ) identifier[data] . identifier[append] ( identifier[int32] . identifier[pack] ( identifier[message_id] )) identifier[data] . identifier[append] ( identifier[int32] . identifier[pack] ( identifier[message_size] )) identifier[data] . identifier[append] ( identifier[message_bytes] ) identifier[data] . identifier[append] ( identifier[int32] . identifier[pack] ( identifier[MSG_EOF] )) keyword[return] literal[string] . identifier[join] ( identifier[data] )
def dumps(obj): """ Serializes a dictionary into Manifest data. :param obj: A dictionary to serialize. :return: A file object. """ if not isinstance(obj, dict): raise TypeError('can only dump a dictionary as a Manifest but got ' + type(obj).__name__) # depends on [control=['if'], data=[]] data = [] int32 = struct.Struct('<I') for message_name in ('payload', 'metadata', 'signature'): message_data = obj[message_name] message_id = MSG_IDS[message_name] message_class = MessageClass[message_id] message = dict_to_protobuf(message_class, message_data) message_bytes = message.SerializeToString() message_size = len(message_bytes) data.append(int32.pack(message_id)) data.append(int32.pack(message_size)) data.append(message_bytes) # depends on [control=['for'], data=['message_name']] # MSG_EOF marks the end of messages. data.append(int32.pack(MSG_EOF)) return b''.join(data)
def get_config_value(self, section, name=None, config_file=None): """ Returns configuration value for a given [``section``] and ``name``. :param section: Section we want to retrieve value from :param name: Name of configuration we want to retrieve :param config_file: A path to file which should be used to retrieve configuration from (might also be a list of file paths) """ if config_file is None: config_file = [] elif isinstance(config_file, basestring): config_file = [config_file] config = self._repo.ui for path in config_file: config.readconfig(path) return config.config(section, name)
def function[get_config_value, parameter[self, section, name, config_file]]: constant[ Returns configuration value for a given [``section``] and ``name``. :param section: Section we want to retrieve value from :param name: Name of configuration we want to retrieve :param config_file: A path to file which should be used to retrieve configuration from (might also be a list of file paths) ] if compare[name[config_file] is constant[None]] begin[:] variable[config_file] assign[=] list[[]] variable[config] assign[=] name[self]._repo.ui for taget[name[path]] in starred[name[config_file]] begin[:] call[name[config].readconfig, parameter[name[path]]] return[call[name[config].config, parameter[name[section], name[name]]]]
keyword[def] identifier[get_config_value] ( identifier[self] , identifier[section] , identifier[name] = keyword[None] , identifier[config_file] = keyword[None] ): literal[string] keyword[if] identifier[config_file] keyword[is] keyword[None] : identifier[config_file] =[] keyword[elif] identifier[isinstance] ( identifier[config_file] , identifier[basestring] ): identifier[config_file] =[ identifier[config_file] ] identifier[config] = identifier[self] . identifier[_repo] . identifier[ui] keyword[for] identifier[path] keyword[in] identifier[config_file] : identifier[config] . identifier[readconfig] ( identifier[path] ) keyword[return] identifier[config] . identifier[config] ( identifier[section] , identifier[name] )
def get_config_value(self, section, name=None, config_file=None): """ Returns configuration value for a given [``section``] and ``name``. :param section: Section we want to retrieve value from :param name: Name of configuration we want to retrieve :param config_file: A path to file which should be used to retrieve configuration from (might also be a list of file paths) """ if config_file is None: config_file = [] # depends on [control=['if'], data=['config_file']] elif isinstance(config_file, basestring): config_file = [config_file] # depends on [control=['if'], data=[]] config = self._repo.ui for path in config_file: config.readconfig(path) # depends on [control=['for'], data=['path']] return config.config(section, name)
def to_str(obj): """ convert a object to string """ if isinstance(obj, str): return obj if isinstance(obj, unicode): return obj.encode('utf-8') return str(obj)
def function[to_str, parameter[obj]]: constant[ convert a object to string ] if call[name[isinstance], parameter[name[obj], name[str]]] begin[:] return[name[obj]] if call[name[isinstance], parameter[name[obj], name[unicode]]] begin[:] return[call[name[obj].encode, parameter[constant[utf-8]]]] return[call[name[str], parameter[name[obj]]]]
keyword[def] identifier[to_str] ( identifier[obj] ): literal[string] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[str] ): keyword[return] identifier[obj] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[unicode] ): keyword[return] identifier[obj] . identifier[encode] ( literal[string] ) keyword[return] identifier[str] ( identifier[obj] )
def to_str(obj): """ convert a object to string """ if isinstance(obj, str): return obj # depends on [control=['if'], data=[]] if isinstance(obj, unicode): return obj.encode('utf-8') # depends on [control=['if'], data=[]] return str(obj)
def html_factory(tag, **defaults): '''Returns an :class:`Html` factory function for ``tag`` and a given dictionary of ``defaults`` parameters. For example:: >>> input_factory = html_factory('input', type='text') >>> html = input_factory(value='bla') ''' def html_input(*children, **params): p = defaults.copy() p.update(params) return Html(tag, *children, **p) return html_input
def function[html_factory, parameter[tag]]: constant[Returns an :class:`Html` factory function for ``tag`` and a given dictionary of ``defaults`` parameters. For example:: >>> input_factory = html_factory('input', type='text') >>> html = input_factory(value='bla') ] def function[html_input, parameter[]]: variable[p] assign[=] call[name[defaults].copy, parameter[]] call[name[p].update, parameter[name[params]]] return[call[name[Html], parameter[name[tag], <ast.Starred object at 0x7da18f58d4e0>]]] return[name[html_input]]
keyword[def] identifier[html_factory] ( identifier[tag] ,** identifier[defaults] ): literal[string] keyword[def] identifier[html_input] (* identifier[children] ,** identifier[params] ): identifier[p] = identifier[defaults] . identifier[copy] () identifier[p] . identifier[update] ( identifier[params] ) keyword[return] identifier[Html] ( identifier[tag] ,* identifier[children] ,** identifier[p] ) keyword[return] identifier[html_input]
def html_factory(tag, **defaults): """Returns an :class:`Html` factory function for ``tag`` and a given dictionary of ``defaults`` parameters. For example:: >>> input_factory = html_factory('input', type='text') >>> html = input_factory(value='bla') """ def html_input(*children, **params): p = defaults.copy() p.update(params) return Html(tag, *children, **p) return html_input
def install_passband(fname, local=True): """ Install a passband from a local file. This simply copies the file into the install path - but beware that clearing the installation will clear the passband as well If local=False, you must have permissions to access the installation directory """ pbdir = _pbdir_local if local else _pbdir_global shutil.copy(fname, pbdir) init_passband(os.path.join(pbdir, fname))
def function[install_passband, parameter[fname, local]]: constant[ Install a passband from a local file. This simply copies the file into the install path - but beware that clearing the installation will clear the passband as well If local=False, you must have permissions to access the installation directory ] variable[pbdir] assign[=] <ast.IfExp object at 0x7da2054a7520> call[name[shutil].copy, parameter[name[fname], name[pbdir]]] call[name[init_passband], parameter[call[name[os].path.join, parameter[name[pbdir], name[fname]]]]]
keyword[def] identifier[install_passband] ( identifier[fname] , identifier[local] = keyword[True] ): literal[string] identifier[pbdir] = identifier[_pbdir_local] keyword[if] identifier[local] keyword[else] identifier[_pbdir_global] identifier[shutil] . identifier[copy] ( identifier[fname] , identifier[pbdir] ) identifier[init_passband] ( identifier[os] . identifier[path] . identifier[join] ( identifier[pbdir] , identifier[fname] ))
def install_passband(fname, local=True): """ Install a passband from a local file. This simply copies the file into the install path - but beware that clearing the installation will clear the passband as well If local=False, you must have permissions to access the installation directory """ pbdir = _pbdir_local if local else _pbdir_global shutil.copy(fname, pbdir) init_passband(os.path.join(pbdir, fname))
def is_missing_variable( value_node: ValueNode, variables: Dict[str, Any] = None ) -> bool: """Check if `value_node` is a variable not defined in the `variables` dict.""" return isinstance(value_node, VariableNode) and ( not variables or is_invalid(variables.get(value_node.name.value, INVALID)) )
def function[is_missing_variable, parameter[value_node, variables]]: constant[Check if `value_node` is a variable not defined in the `variables` dict.] return[<ast.BoolOp object at 0x7da1b1d494e0>]
keyword[def] identifier[is_missing_variable] ( identifier[value_node] : identifier[ValueNode] , identifier[variables] : identifier[Dict] [ identifier[str] , identifier[Any] ]= keyword[None] )-> identifier[bool] : literal[string] keyword[return] identifier[isinstance] ( identifier[value_node] , identifier[VariableNode] ) keyword[and] ( keyword[not] identifier[variables] keyword[or] identifier[is_invalid] ( identifier[variables] . identifier[get] ( identifier[value_node] . identifier[name] . identifier[value] , identifier[INVALID] )) )
def is_missing_variable(value_node: ValueNode, variables: Dict[str, Any]=None) -> bool: """Check if `value_node` is a variable not defined in the `variables` dict.""" return isinstance(value_node, VariableNode) and (not variables or is_invalid(variables.get(value_node.name.value, INVALID)))
def horz_offset(self, offset): """ Set the value of ./c:manualLayout/c:x@val to *offset* and ./c:manualLayout/c:xMode@val to "factor". Remove ./c:manualLayout if *offset* == 0. """ if offset == 0.0: self._remove_manualLayout() return manualLayout = self.get_or_add_manualLayout() manualLayout.horz_offset = offset
def function[horz_offset, parameter[self, offset]]: constant[ Set the value of ./c:manualLayout/c:x@val to *offset* and ./c:manualLayout/c:xMode@val to "factor". Remove ./c:manualLayout if *offset* == 0. ] if compare[name[offset] equal[==] constant[0.0]] begin[:] call[name[self]._remove_manualLayout, parameter[]] return[None] variable[manualLayout] assign[=] call[name[self].get_or_add_manualLayout, parameter[]] name[manualLayout].horz_offset assign[=] name[offset]
keyword[def] identifier[horz_offset] ( identifier[self] , identifier[offset] ): literal[string] keyword[if] identifier[offset] == literal[int] : identifier[self] . identifier[_remove_manualLayout] () keyword[return] identifier[manualLayout] = identifier[self] . identifier[get_or_add_manualLayout] () identifier[manualLayout] . identifier[horz_offset] = identifier[offset]
def horz_offset(self, offset): """ Set the value of ./c:manualLayout/c:x@val to *offset* and ./c:manualLayout/c:xMode@val to "factor". Remove ./c:manualLayout if *offset* == 0. """ if offset == 0.0: self._remove_manualLayout() return # depends on [control=['if'], data=[]] manualLayout = self.get_or_add_manualLayout() manualLayout.horz_offset = offset
def BVS(self, name, size, min=None, max=None, stride=None, uninitialized=False, explicit_name=None, key=None, eternal=False, inspect=True, events=True, **kwargs): #pylint:disable=redefined-builtin """ Creates a bit-vector symbol (i.e., a variable). Other keyword parameters are passed directly on to the constructor of claripy.ast.BV. :param name: The name of the symbol. :param size: The size (in bits) of the bit-vector. :param min: The minimum value of the symbol. Note that this **only** work when using VSA. :param max: The maximum value of the symbol. Note that this **only** work when using VSA. :param stride: The stride of the symbol. Note that this **only** work when using VSA. :param uninitialized: Whether this value should be counted as an "uninitialized" value in the course of an analysis. :param explicit_name: Set to True to prevent an identifier from appended to the name to ensure uniqueness. :param key: Set this to a tuple of increasingly specific identifiers (for example, ``('mem', 0xffbeff00)`` or ``('file', 4, 0x20)`` to cause it to be tracked, i.e. accessable through ``solver.get_variables``. :param eternal: Set to True in conjunction with setting a key to cause all states with the same ancestry to retrieve the same symbol when trying to create the value. If False, a counter will be appended to the key. :param inspect: Set to False to avoid firing SimInspect breakpoints :param events: Set to False to avoid generating a SimEvent for the occasion :return: A BV object representing this symbol. """ # should this be locked for multithreading? if key is not None and eternal and key in self.eternal_tracked_variables: r = self.eternal_tracked_variables[key] # pylint: disable=too-many-boolean-expressions if size != r.length or min != r.args[1] or max != r.args[2] or stride != r.args[3] or uninitialized != r.args[4] or bool(explicit_name) ^ (r.args[0] == name): l.warning("Variable %s being retrieved with differnt settings than it was tracked with", name) else: r = claripy.BVS(name, size, min=min, max=max, stride=stride, uninitialized=uninitialized, explicit_name=explicit_name, **kwargs) if key is not None: self.register_variable(r, key, eternal) if inspect: self.state._inspect('symbolic_variable', BP_AFTER, symbolic_name=next(iter(r.variables)), symbolic_size=size, symbolic_expr=r) if events: self.state.history.add_event('unconstrained', name=next(iter(r.variables)), bits=size, **kwargs) if o.TRACK_SOLVER_VARIABLES in self.state.options: self.all_variables = list(self.all_variables) self.all_variables.append(r) return r
def function[BVS, parameter[self, name, size, min, max, stride, uninitialized, explicit_name, key, eternal, inspect, events]]: constant[ Creates a bit-vector symbol (i.e., a variable). Other keyword parameters are passed directly on to the constructor of claripy.ast.BV. :param name: The name of the symbol. :param size: The size (in bits) of the bit-vector. :param min: The minimum value of the symbol. Note that this **only** work when using VSA. :param max: The maximum value of the symbol. Note that this **only** work when using VSA. :param stride: The stride of the symbol. Note that this **only** work when using VSA. :param uninitialized: Whether this value should be counted as an "uninitialized" value in the course of an analysis. :param explicit_name: Set to True to prevent an identifier from appended to the name to ensure uniqueness. :param key: Set this to a tuple of increasingly specific identifiers (for example, ``('mem', 0xffbeff00)`` or ``('file', 4, 0x20)`` to cause it to be tracked, i.e. accessable through ``solver.get_variables``. :param eternal: Set to True in conjunction with setting a key to cause all states with the same ancestry to retrieve the same symbol when trying to create the value. If False, a counter will be appended to the key. :param inspect: Set to False to avoid firing SimInspect breakpoints :param events: Set to False to avoid generating a SimEvent for the occasion :return: A BV object representing this symbol. ] if <ast.BoolOp object at 0x7da2044c1a80> begin[:] variable[r] assign[=] call[name[self].eternal_tracked_variables][name[key]] if <ast.BoolOp object at 0x7da2044c0d30> begin[:] call[name[l].warning, parameter[constant[Variable %s being retrieved with differnt settings than it was tracked with], name[name]]] if name[inspect] begin[:] call[name[self].state._inspect, parameter[constant[symbolic_variable], name[BP_AFTER]]] if name[events] begin[:] call[name[self].state.history.add_event, parameter[constant[unconstrained]]] if compare[name[o].TRACK_SOLVER_VARIABLES in name[self].state.options] begin[:] name[self].all_variables assign[=] call[name[list], parameter[name[self].all_variables]] call[name[self].all_variables.append, parameter[name[r]]] return[name[r]]
keyword[def] identifier[BVS] ( identifier[self] , identifier[name] , identifier[size] , identifier[min] = keyword[None] , identifier[max] = keyword[None] , identifier[stride] = keyword[None] , identifier[uninitialized] = keyword[False] , identifier[explicit_name] = keyword[None] , identifier[key] = keyword[None] , identifier[eternal] = keyword[False] , identifier[inspect] = keyword[True] , identifier[events] = keyword[True] , ** identifier[kwargs] ): literal[string] keyword[if] identifier[key] keyword[is] keyword[not] keyword[None] keyword[and] identifier[eternal] keyword[and] identifier[key] keyword[in] identifier[self] . identifier[eternal_tracked_variables] : identifier[r] = identifier[self] . identifier[eternal_tracked_variables] [ identifier[key] ] keyword[if] identifier[size] != identifier[r] . identifier[length] keyword[or] identifier[min] != identifier[r] . identifier[args] [ literal[int] ] keyword[or] identifier[max] != identifier[r] . identifier[args] [ literal[int] ] keyword[or] identifier[stride] != identifier[r] . identifier[args] [ literal[int] ] keyword[or] identifier[uninitialized] != identifier[r] . identifier[args] [ literal[int] ] keyword[or] identifier[bool] ( identifier[explicit_name] )^( identifier[r] . identifier[args] [ literal[int] ]== identifier[name] ): identifier[l] . identifier[warning] ( literal[string] , identifier[name] ) keyword[else] : identifier[r] = identifier[claripy] . identifier[BVS] ( identifier[name] , identifier[size] , identifier[min] = identifier[min] , identifier[max] = identifier[max] , identifier[stride] = identifier[stride] , identifier[uninitialized] = identifier[uninitialized] , identifier[explicit_name] = identifier[explicit_name] ,** identifier[kwargs] ) keyword[if] identifier[key] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[register_variable] ( identifier[r] , identifier[key] , identifier[eternal] ) keyword[if] identifier[inspect] : identifier[self] . identifier[state] . identifier[_inspect] ( literal[string] , identifier[BP_AFTER] , identifier[symbolic_name] = identifier[next] ( identifier[iter] ( identifier[r] . identifier[variables] )), identifier[symbolic_size] = identifier[size] , identifier[symbolic_expr] = identifier[r] ) keyword[if] identifier[events] : identifier[self] . identifier[state] . identifier[history] . identifier[add_event] ( literal[string] , identifier[name] = identifier[next] ( identifier[iter] ( identifier[r] . identifier[variables] )), identifier[bits] = identifier[size] ,** identifier[kwargs] ) keyword[if] identifier[o] . identifier[TRACK_SOLVER_VARIABLES] keyword[in] identifier[self] . identifier[state] . identifier[options] : identifier[self] . identifier[all_variables] = identifier[list] ( identifier[self] . identifier[all_variables] ) identifier[self] . identifier[all_variables] . identifier[append] ( identifier[r] ) keyword[return] identifier[r]
def BVS(self, name, size, min=None, max=None, stride=None, uninitialized=False, explicit_name=None, key=None, eternal=False, inspect=True, events=True, **kwargs): #pylint:disable=redefined-builtin '\n Creates a bit-vector symbol (i.e., a variable). Other keyword parameters are passed directly on to the\n constructor of claripy.ast.BV.\n\n :param name: The name of the symbol.\n :param size: The size (in bits) of the bit-vector.\n :param min: The minimum value of the symbol. Note that this **only** work when using VSA.\n :param max: The maximum value of the symbol. Note that this **only** work when using VSA.\n :param stride: The stride of the symbol. Note that this **only** work when using VSA.\n :param uninitialized: Whether this value should be counted as an "uninitialized" value in the course of an\n analysis.\n :param explicit_name: Set to True to prevent an identifier from appended to the name to ensure uniqueness.\n :param key: Set this to a tuple of increasingly specific identifiers (for example,\n ``(\'mem\', 0xffbeff00)`` or ``(\'file\', 4, 0x20)`` to cause it to be tracked, i.e.\n accessable through ``solver.get_variables``.\n :param eternal: Set to True in conjunction with setting a key to cause all states with the same\n ancestry to retrieve the same symbol when trying to create the value. If False, a\n counter will be appended to the key.\n :param inspect: Set to False to avoid firing SimInspect breakpoints\n :param events: Set to False to avoid generating a SimEvent for the occasion\n\n :return: A BV object representing this symbol.\n ' # should this be locked for multithreading? if key is not None and eternal and (key in self.eternal_tracked_variables): r = self.eternal_tracked_variables[key] # pylint: disable=too-many-boolean-expressions if size != r.length or min != r.args[1] or max != r.args[2] or (stride != r.args[3]) or (uninitialized != r.args[4]) or (bool(explicit_name) ^ (r.args[0] == name)): l.warning('Variable %s being retrieved with differnt settings than it was tracked with', name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: r = claripy.BVS(name, size, min=min, max=max, stride=stride, uninitialized=uninitialized, explicit_name=explicit_name, **kwargs) if key is not None: self.register_variable(r, key, eternal) # depends on [control=['if'], data=['key']] if inspect: self.state._inspect('symbolic_variable', BP_AFTER, symbolic_name=next(iter(r.variables)), symbolic_size=size, symbolic_expr=r) # depends on [control=['if'], data=[]] if events: self.state.history.add_event('unconstrained', name=next(iter(r.variables)), bits=size, **kwargs) # depends on [control=['if'], data=[]] if o.TRACK_SOLVER_VARIABLES in self.state.options: self.all_variables = list(self.all_variables) self.all_variables.append(r) # depends on [control=['if'], data=[]] return r
def enumerate_device_serials(vid=FT232H_VID, pid=FT232H_PID): """Return a list of all FT232H device serial numbers connected to the machine. You can use these serial numbers to open a specific FT232H device by passing it to the FT232H initializer's serial parameter. """ try: # Create a libftdi context. ctx = None ctx = ftdi.new() # Enumerate FTDI devices. device_list = None count, device_list = ftdi.usb_find_all(ctx, vid, pid) if count < 0: raise RuntimeError('ftdi_usb_find_all returned error {0}: {1}'.format(count, ftdi.get_error_string(self._ctx))) # Walk through list of devices and assemble list of serial numbers. devices = [] while device_list is not None: # Get USB device strings and add serial to list of devices. ret, manufacturer, description, serial = ftdi.usb_get_strings(ctx, device_list.dev, 256, 256, 256) if serial is not None: devices.append(serial) device_list = device_list.next return devices finally: # Make sure to clean up list and context when done. if device_list is not None: ftdi.list_free(device_list) if ctx is not None: ftdi.free(ctx)
def function[enumerate_device_serials, parameter[vid, pid]]: constant[Return a list of all FT232H device serial numbers connected to the machine. You can use these serial numbers to open a specific FT232H device by passing it to the FT232H initializer's serial parameter. ] <ast.Try object at 0x7da1b016fdf0>
keyword[def] identifier[enumerate_device_serials] ( identifier[vid] = identifier[FT232H_VID] , identifier[pid] = identifier[FT232H_PID] ): literal[string] keyword[try] : identifier[ctx] = keyword[None] identifier[ctx] = identifier[ftdi] . identifier[new] () identifier[device_list] = keyword[None] identifier[count] , identifier[device_list] = identifier[ftdi] . identifier[usb_find_all] ( identifier[ctx] , identifier[vid] , identifier[pid] ) keyword[if] identifier[count] < literal[int] : keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[count] , identifier[ftdi] . identifier[get_error_string] ( identifier[self] . identifier[_ctx] ))) identifier[devices] =[] keyword[while] identifier[device_list] keyword[is] keyword[not] keyword[None] : identifier[ret] , identifier[manufacturer] , identifier[description] , identifier[serial] = identifier[ftdi] . identifier[usb_get_strings] ( identifier[ctx] , identifier[device_list] . identifier[dev] , literal[int] , literal[int] , literal[int] ) keyword[if] identifier[serial] keyword[is] keyword[not] keyword[None] : identifier[devices] . identifier[append] ( identifier[serial] ) identifier[device_list] = identifier[device_list] . identifier[next] keyword[return] identifier[devices] keyword[finally] : keyword[if] identifier[device_list] keyword[is] keyword[not] keyword[None] : identifier[ftdi] . identifier[list_free] ( identifier[device_list] ) keyword[if] identifier[ctx] keyword[is] keyword[not] keyword[None] : identifier[ftdi] . identifier[free] ( identifier[ctx] )
def enumerate_device_serials(vid=FT232H_VID, pid=FT232H_PID): """Return a list of all FT232H device serial numbers connected to the machine. You can use these serial numbers to open a specific FT232H device by passing it to the FT232H initializer's serial parameter. """ try: # Create a libftdi context. ctx = None ctx = ftdi.new() # Enumerate FTDI devices. device_list = None (count, device_list) = ftdi.usb_find_all(ctx, vid, pid) if count < 0: raise RuntimeError('ftdi_usb_find_all returned error {0}: {1}'.format(count, ftdi.get_error_string(self._ctx))) # depends on [control=['if'], data=['count']] # Walk through list of devices and assemble list of serial numbers. devices = [] while device_list is not None: # Get USB device strings and add serial to list of devices. (ret, manufacturer, description, serial) = ftdi.usb_get_strings(ctx, device_list.dev, 256, 256, 256) if serial is not None: devices.append(serial) # depends on [control=['if'], data=['serial']] device_list = device_list.next # depends on [control=['while'], data=['device_list']] return devices # depends on [control=['try'], data=[]] finally: # Make sure to clean up list and context when done. if device_list is not None: ftdi.list_free(device_list) # depends on [control=['if'], data=['device_list']] if ctx is not None: ftdi.free(ctx) # depends on [control=['if'], data=['ctx']]
def blit(self, surface, pos=(0, 0)): """ Blits a surface on this surface at pos :param surface: Surface to blit :param pos: Top left point to start blitting :type surface: Surface :type pos: tuple """ for x in range(surface.width): for y in range(surface.height): px = x + pos[0] py = y + pos[1] if 0 < px < self.width and 0 < py < self.height: self.matrix[px][py] = surface.matrix[x][y]
def function[blit, parameter[self, surface, pos]]: constant[ Blits a surface on this surface at pos :param surface: Surface to blit :param pos: Top left point to start blitting :type surface: Surface :type pos: tuple ] for taget[name[x]] in starred[call[name[range], parameter[name[surface].width]]] begin[:] for taget[name[y]] in starred[call[name[range], parameter[name[surface].height]]] begin[:] variable[px] assign[=] binary_operation[name[x] + call[name[pos]][constant[0]]] variable[py] assign[=] binary_operation[name[y] + call[name[pos]][constant[1]]] if <ast.BoolOp object at 0x7da1b15a37f0> begin[:] call[call[name[self].matrix][name[px]]][name[py]] assign[=] call[call[name[surface].matrix][name[x]]][name[y]]
keyword[def] identifier[blit] ( identifier[self] , identifier[surface] , identifier[pos] =( literal[int] , literal[int] )): literal[string] keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[surface] . identifier[width] ): keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[surface] . identifier[height] ): identifier[px] = identifier[x] + identifier[pos] [ literal[int] ] identifier[py] = identifier[y] + identifier[pos] [ literal[int] ] keyword[if] literal[int] < identifier[px] < identifier[self] . identifier[width] keyword[and] literal[int] < identifier[py] < identifier[self] . identifier[height] : identifier[self] . identifier[matrix] [ identifier[px] ][ identifier[py] ]= identifier[surface] . identifier[matrix] [ identifier[x] ][ identifier[y] ]
def blit(self, surface, pos=(0, 0)): """ Blits a surface on this surface at pos :param surface: Surface to blit :param pos: Top left point to start blitting :type surface: Surface :type pos: tuple """ for x in range(surface.width): for y in range(surface.height): px = x + pos[0] py = y + pos[1] if 0 < px < self.width and 0 < py < self.height: self.matrix[px][py] = surface.matrix[x][y] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['y']] # depends on [control=['for'], data=['x']]
def dependency_graph(self, filename=None): """Visualize the computational graph. :param filename: Filename of the output image together with file extension. Supported formats: `png`, `jpg`, `pdf`, ... . Check `graphviz` Python package for more options :type filename: str :return: The DOT representation of the computational graph, with some more formatting :rtype: Digraph """ dot = self.get_dot() dot.attr(rankdir='LR') # Show graph from left to right if filename is not None: file_name, file_format = filename.rsplit('.', 1) dot.render(filename=file_name, format=file_format, cleanup=True) return dot
def function[dependency_graph, parameter[self, filename]]: constant[Visualize the computational graph. :param filename: Filename of the output image together with file extension. Supported formats: `png`, `jpg`, `pdf`, ... . Check `graphviz` Python package for more options :type filename: str :return: The DOT representation of the computational graph, with some more formatting :rtype: Digraph ] variable[dot] assign[=] call[name[self].get_dot, parameter[]] call[name[dot].attr, parameter[]] if compare[name[filename] is_not constant[None]] begin[:] <ast.Tuple object at 0x7da20c993490> assign[=] call[name[filename].rsplit, parameter[constant[.], constant[1]]] call[name[dot].render, parameter[]] return[name[dot]]
keyword[def] identifier[dependency_graph] ( identifier[self] , identifier[filename] = keyword[None] ): literal[string] identifier[dot] = identifier[self] . identifier[get_dot] () identifier[dot] . identifier[attr] ( identifier[rankdir] = literal[string] ) keyword[if] identifier[filename] keyword[is] keyword[not] keyword[None] : identifier[file_name] , identifier[file_format] = identifier[filename] . identifier[rsplit] ( literal[string] , literal[int] ) identifier[dot] . identifier[render] ( identifier[filename] = identifier[file_name] , identifier[format] = identifier[file_format] , identifier[cleanup] = keyword[True] ) keyword[return] identifier[dot]
def dependency_graph(self, filename=None): """Visualize the computational graph. :param filename: Filename of the output image together with file extension. Supported formats: `png`, `jpg`, `pdf`, ... . Check `graphviz` Python package for more options :type filename: str :return: The DOT representation of the computational graph, with some more formatting :rtype: Digraph """ dot = self.get_dot() dot.attr(rankdir='LR') # Show graph from left to right if filename is not None: (file_name, file_format) = filename.rsplit('.', 1) dot.render(filename=file_name, format=file_format, cleanup=True) # depends on [control=['if'], data=['filename']] return dot
def ls(self): """List the children entities of the directory. Raises exception if the object is a file. :return: """ if self.isfile(): raise NotDirectoryError('Cannot ls() on non-directory node: {path}'.format(path=self._pyerarchy_path)) return os.listdir(self._pyerarchy_path)
def function[ls, parameter[self]]: constant[List the children entities of the directory. Raises exception if the object is a file. :return: ] if call[name[self].isfile, parameter[]] begin[:] <ast.Raise object at 0x7da204961d50> return[call[name[os].listdir, parameter[name[self]._pyerarchy_path]]]
keyword[def] identifier[ls] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[isfile] (): keyword[raise] identifier[NotDirectoryError] ( literal[string] . identifier[format] ( identifier[path] = identifier[self] . identifier[_pyerarchy_path] )) keyword[return] identifier[os] . identifier[listdir] ( identifier[self] . identifier[_pyerarchy_path] )
def ls(self): """List the children entities of the directory. Raises exception if the object is a file. :return: """ if self.isfile(): raise NotDirectoryError('Cannot ls() on non-directory node: {path}'.format(path=self._pyerarchy_path)) # depends on [control=['if'], data=[]] return os.listdir(self._pyerarchy_path)
def write_to_filterbank(self, filename_out): """ Write data to blimpy file. Args: filename_out (str): Name of output file """ print("[Filterbank] Warning: Non-standard function to write in filterbank (.fil) format. Please use Waterfall.") n_bytes = int(self.header[b'nbits'] / 8) with open(filename_out, "wb") as fileh: fileh.write(generate_sigproc_header(self)) j = self.data if n_bytes == 4: np.float32(j.ravel()).tofile(fileh) elif n_bytes == 2: np.int16(j.ravel()).tofile(fileh) elif n_bytes == 1: np.int8(j.ravel()).tofile(fileh)
def function[write_to_filterbank, parameter[self, filename_out]]: constant[ Write data to blimpy file. Args: filename_out (str): Name of output file ] call[name[print], parameter[constant[[Filterbank] Warning: Non-standard function to write in filterbank (.fil) format. Please use Waterfall.]]] variable[n_bytes] assign[=] call[name[int], parameter[binary_operation[call[name[self].header][constant[b'nbits']] / constant[8]]]] with call[name[open], parameter[name[filename_out], constant[wb]]] begin[:] call[name[fileh].write, parameter[call[name[generate_sigproc_header], parameter[name[self]]]]] variable[j] assign[=] name[self].data if compare[name[n_bytes] equal[==] constant[4]] begin[:] call[call[name[np].float32, parameter[call[name[j].ravel, parameter[]]]].tofile, parameter[name[fileh]]]
keyword[def] identifier[write_to_filterbank] ( identifier[self] , identifier[filename_out] ): literal[string] identifier[print] ( literal[string] ) identifier[n_bytes] = identifier[int] ( identifier[self] . identifier[header] [ literal[string] ]/ literal[int] ) keyword[with] identifier[open] ( identifier[filename_out] , literal[string] ) keyword[as] identifier[fileh] : identifier[fileh] . identifier[write] ( identifier[generate_sigproc_header] ( identifier[self] )) identifier[j] = identifier[self] . identifier[data] keyword[if] identifier[n_bytes] == literal[int] : identifier[np] . identifier[float32] ( identifier[j] . identifier[ravel] ()). identifier[tofile] ( identifier[fileh] ) keyword[elif] identifier[n_bytes] == literal[int] : identifier[np] . identifier[int16] ( identifier[j] . identifier[ravel] ()). identifier[tofile] ( identifier[fileh] ) keyword[elif] identifier[n_bytes] == literal[int] : identifier[np] . identifier[int8] ( identifier[j] . identifier[ravel] ()). identifier[tofile] ( identifier[fileh] )
def write_to_filterbank(self, filename_out): """ Write data to blimpy file. Args: filename_out (str): Name of output file """ print('[Filterbank] Warning: Non-standard function to write in filterbank (.fil) format. Please use Waterfall.') n_bytes = int(self.header[b'nbits'] / 8) with open(filename_out, 'wb') as fileh: fileh.write(generate_sigproc_header(self)) j = self.data if n_bytes == 4: np.float32(j.ravel()).tofile(fileh) # depends on [control=['if'], data=[]] elif n_bytes == 2: np.int16(j.ravel()).tofile(fileh) # depends on [control=['if'], data=[]] elif n_bytes == 1: np.int8(j.ravel()).tofile(fileh) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['fileh']]