Dataset schema (column · type · observed range):

    _id               string, lengths 2–7
    title             string, lengths 1–88
    partition         string, 3 classes
    text              string, lengths 31–13.1k
    language          string, 1 value
    meta_information  dict

q270100 · ExecuteHomeAssistant.get_url · partition: test · language: python

    def get_url(self):
        """Home assistant url

        :return: url
        :rtype: str
        """
        url = super(ExecuteHomeAssistant, self).get_url()
        if not self.data.get('event'):

q270101 · ExecuteIFTTT.get_url · partition: test · language: python

    def get_url(self):
        """IFTTT Webhook url

        :return: url
        :rtype: str
        """
        if not self.data[self.execute_name]:
            raise InvalidConfig(extra_body='Value for IFTTT is required on {} device. Get your key here: '
                                           'https://ifttt.com/services/maker_webhooks/settings'.format(self.name))
        if not self.data.get('event'):
            raise InvalidConfig(extra_body='Event option is required for IFTTT on {} device. '

q270102 · pkt_text · partition: test · language: python

    def pkt_text(pkt):
        """Return source mac address for this Scapy Packet

        :param scapy.packet.Packet pkt: Scapy Packet
        :return: Mac address. Include (Amazon Device) for these devices
        :rtype: str
        """
        if pkt.src.upper() in BANNED_DEVICES:
            body = ''

q270103 · discovery_print · partition: test · language: python

    def discovery_print(pkt):
        """Scandevice callback. Register src mac to avoid src repetition.
        Print device on screen.

        :param scapy.packet.Packet pkt: Scapy

q270104 · discover · partition: test · language: python

    def discover(interface=None):
        """Print help and scan devices on screen.

        :return: None

q270105 · Device.execute · partition: test · language: python

    def execute(self, root_allowed=False):
        """Execute this device

        :param bool root_allowed: Only used for ExecuteCmd
        :return: None
        """
        logger.debug('%s device executed (mac %s)', self.name, self.src)
        if not self.execute_instance:
            msg = '%s: There is no execution method in device conf.'
            logger.warning(msg, self.name)
            self.send_confirmation(msg % self.name, False)
            return
        try:
            result = self.execute_instance.execute(root_allowed)
        except Exception as e:
            self.send_confirmation('Error executing the device {}: {}'.format(self.name, e), False)

q270106 · Device.send_confirmation · partition: test · language: python

    def send_confirmation(self, message, success=True):
        """Send success or error message to configured confirmation

        :param str message: Body message to send
        :param bool success: Device executed successfully to personalize message
        :return: None
        """
        message = message.strip()
        if not self.confirmation:
            return
        try:

q270107 · Listener.on_push · partition: test · language: python

    def on_push(self, device):
        """Press button. Check DEFAULT_DELAY.

        :param scapy.packet.Packet device: Scapy packet
        :return: None

q270108 · Listener.execute · partition: test · language: python

    def execute(self, device):
        """Execute a device. Used if the time between executions is
        greater than DEFAULT_DELAY

        :param scapy.packet.Packet device: Scapy packet
        :return: None
        """
        src = device.src.lower()

q270109 · Listener.run · partition: test · language: python

    def run(self, root_allowed=False):
        """Start daemon mode

        :param bool root_allowed: Only used for ExecuteCmd
        :return: loop
        """
        self.root_allowed = root_allowed

q270110 · OfxConverter.convert · partition: test · language: python

    def convert(self, txn):
        """
        Convert an OFX Transaction to a posting
        """
        ofxid = self.mk_ofxid(txn.id)
        metadata = {}
        posting_metadata = {"ofxid": ofxid}

        if isinstance(txn, OfxTransaction):
            posting = Posting(self.name, Amount(txn.amount, self.currency),
                              metadata=posting_metadata)
            return Transaction(
                date=txn.date,
                payee=self.format_payee(txn),
                postings=[
                    posting,
                    posting.clone_inverted(
                        self.mk_dynamic_account(self.format_payee(txn),
                                                exclude=self.name))])
        elif isinstance(txn, InvestmentTransaction):
            acct1 = self.name
            acct2 = self.name

            posting1 = None
            posting2 = None

            security = self.maybe_get_ticker(txn.security)

            if isinstance(txn.type, str):
                # recent versions of ofxparse
                if re.match('^(buy|sell)', txn.type):
                    acct2 = self.unknownaccount or 'Assets:Unknown'
                elif txn.type == 'transfer':
                    acct2 = 'Transfer'
                elif txn.type == 'reinvest':
                    # reinvestment of income
                    # TODO: make this configurable
                    acct2 = 'Income:Interest'
                elif txn.type == 'income' and txn.income_type == 'DIV':
                    # Fidelity lists non-reinvested dividend income as
                    # type: income, income_type: DIV
                    # TODO: determine how dividend income is listed from other
                    # institutions
                    # income/DIV transactions do not involve buying or selling
                    # a security, so their postings need special handling
                    # compared to others
                    metadata['dividend_from'] = security
                    acct2 = 'Income:Dividends'
                    posting1 = Posting(acct1,
                                       Amount(txn.total, self.currency),
                                       metadata=posting_metadata)
                    posting2 = posting1.clone_inverted(acct2)
                else:
                    # ???
                    pass
            else:
                # Old version of ofxparse
                if (txn.type in [0, 1, 3, 4]):
                    # buymf, sellmf, buystock, sellstock
                    acct2 = self.unknownaccount or 'Assets:Unknown'
                elif (txn.type == 2):
                    # reinvest
                    acct2 = 'Income:Interest'
                else:
                    # ???
                    pass

q270111 · find_ledger_file · partition: test · language: python

    def find_ledger_file(ledgerrcpath=None):
        """Returns the main ledger file path or raises an exception if it
        cannot be found."""
        if ledgerrcpath is None:
            ledgerrcpath = os.path.abspath(os.path.expanduser("~/.ledgerrc"))
        if "LEDGER_FILE" in os.environ:
            return os.path.abspath(os.path.expanduser(os.environ["LEDGER_FILE"]))
        elif os.path.exists(ledgerrcpath):
            # hacky
            ledgerrc = open(ledgerrcpath)

q270112 · compatibility · partition: test · language: python

    def compatibility(session, install):
        """Run the unit test suite with each support library and Python version."""

q270113 · get_long_description · partition: test · language: python

    def get_long_description():
        """Transform README.md into a usable long description.

        Replaces relative references to svg images with absolute https
        references.
        """
        with open('README.md') as f:
            read_me = f.read()

        def replace_relative_with_absolute(match):
            svg_path = match.group(0)[1:-1]
            return ('(https://github.com/google/pybadges/raw/master/'

q270114 · PrecalculatedTextMeasurer.from_json · partition: test · language: python

    def from_json(f: TextIO) -> 'PrecalculatedTextMeasurer':
        """Return a PrecalculatedTextMeasurer given a JSON stream.

        See precalculate_text.py for details on the required

q270115 · PrecalculatedTextMeasurer.default · partition: test · language: python

    def default(cls) -> 'PrecalculatedTextMeasurer':
        """Returns a reasonable default PrecalculatedTextMeasurer."""
        if cls._default_cache is not None:
            return cls._default_cache

        if pkg_resources.resource_exists(__name__, 'default-widths.json.xz'):
            import lzma
            with pkg_resources.resource_stream(__name__,
                                               'default-widths.json.xz') as f:
                with lzma.open(f, "rt") as g:
                    cls._default_cache = PrecalculatedTextMeasurer.from_json(
                        cast(TextIO, g))

q270116 · badge · partition: test · language: python

    def badge(left_text: str, right_text: str, left_link: Optional[str] = None,
              right_link: Optional[str] = None,
              whole_link: Optional[str] = None, logo: Optional[str] = None,
              left_color: str = '#555', right_color: str = '#007ec6',
              measurer: Optional[text_measurer.TextMeasurer] = None,
              embed_logo: bool = False) -> str:
        """Creates a github-style badge as an SVG image.

        >>> badge(left_text='coverage', right_text='23%', right_color='red')
        '<svg...</svg>'
        >>> badge(left_text='build', right_text='green', right_color='green',
        ...       whole_link="http://www.example.com/")
        '<svg...</svg>'

        Args:
            left_text: The text that should appear on the left-hand-side of
                the badge e.g. "coverage".
            right_text: The text that should appear on the right-hand-side of
                the badge e.g. "23%".
            left_link: The URL that should be redirected to when the left-hand
                text is selected.
            right_link: The URL that should be redirected to when the
                right-hand text is selected.
            whole_link: The link that should be redirected to when the badge is
                selected. If set then left_link and right_link may not be set.
            logo: A url representing a logo that will be displayed inside the
                badge. Can be a data URL e.g. "data:image/svg+xml;utf8,<svg..."
            left_color: The color of the part of the badge containing the
                left-hand text. Can be a valid CSS color (see
                https://developer.mozilla.org/en-US/docs/Web/CSS/color) or a
                color name defined here:
                https://github.com/badges/shields/blob/master/lib/colorscheme.json
            right_color: The color of the part of the badge containing the
                right-hand text. Can be a valid CSS color (see
                https://developer.mozilla.org/en-US/docs/Web/CSS/color) or a
                color name defined here:
                https://github.com/badges/shields/blob/master/lib/colorscheme.json
            measurer: A text_measurer.TextMeasurer that can be used to measure
                the width of left_text and right_text.
            embed_logo: If True then embed the logo image directly in the
                badge. This can prevent an HTTP request and some browsers will
                not render externally referenced images. When True, `logo`
                must be an HTTP/HTTPS URI or a filesystem path. Also, the
                `badge` call may raise an

q270117 · generate_supported_characters · partition: test · language: python

    def generate_supported_characters(deja_vu_sans_path: str) -> Iterable[str]:
        """Generate the characters supported by the font at the

q270118 · generate_encodeable_characters · partition: test · language: python

    def generate_encodeable_characters(characters: Iterable[str],
                                       encodings: Iterable[str]) -> Iterable[str]:
        """Generates the subset of 'characters' that can be encoded by 'encodings'.

        Args:
            characters: The characters to check for encodeability e.g. 'abcd'.
            encodings: The encodings to check against e.g. ['cp1252',
                'iso-8859-5'].

        Returns:
            The subset of

q270119 · calculate_character_to_length_mapping · partition: test · language: python

    def calculate_character_to_length_mapping(
            measurer: text_measurer.TextMeasurer,
            characters: Iterable[str]) -> Mapping[str, float]:
        """Return a mapping between each given character and its length.

        Args:
            measurer: The TextMeasurer used to measure the width of the text
                in pixels.
            characters: The characters to measure e.g. "ml".

q270120 · write_json · partition: test · language: python

    def write_json(f: TextIO, deja_vu_sans_path: str,
                   measurer: text_measurer.TextMeasurer,
                   encodings: Iterable[str]) -> None:
        """Write the data required by PrecalculatedTextMeasurer to a stream."""
        supported_characters = list(
            generate_supported_characters(deja_vu_sans_path))
        kerning_characters = ''.join(
            generate_encodeable_characters(supported_characters, encodings))
        char_to_length = calculate_character_to_length_mapping(
            measurer, supported_characters)
        pair_to_kerning

q270121 · convolve_gaussian_2d · partition: test · language: python

    def convolve_gaussian_2d(image, gaussian_kernel_1d):
        """Convolve 2d gaussian."""
        result = scipy.ndimage.filters.correlate1d(

q270122 · get_gaussian_kernel · partition: test · language: python

    def get_gaussian_kernel(gaussian_kernel_width=11, gaussian_kernel_sigma=1.5):
        """Generate a gaussian kernel."""
        # 1D Gaussian kernel definition
        gaussian_kernel_1d = numpy.ndarray((gaussian_kernel_width))
        norm_mu = int(gaussian_kernel_width / 2)

        # Fill Gaussian kernel
        for i in range(gaussian_kernel_width):
            gaussian_kernel_1d[i] =

q270123 · to_grayscale · partition: test · language: python

    def to_grayscale(img):
        """Convert PIL image to numpy grayscale array and numpy alpha array.

        Args:
            img (PIL.Image): PIL Image object.

        Returns:
            (gray, alpha): both numpy arrays.
        """
        gray = numpy.asarray(ImageOps.grayscale(img)).astype(numpy.float)

q270124 · main · partition: test · language: python

    def main():
        """Main function for pyssim."""
        description = '\n'.join([
            'Compares an image with a list of images using the SSIM metric.',
            ' Example:',
            ' pyssim test-images/test1-1.png "test-images/*"'
        ])
        parser = argparse.ArgumentParser(
            prog='pyssim', formatter_class=argparse.RawTextHelpFormatter,
            description=description)
        parser.add_argument('--cw',
                            help='compute the complex wavelet SSIM',
                            action='store_true')
        parser.add_argument(
            'base_image', metavar='image1.png', type=argparse.FileType('r'))
        parser.add_argument(
            'comparison_images', metavar='image path with* or image2.png')
        parser.add_argument('--width', type=int, default=None,
                            help='scales the image before computing SSIM')
        parser.add_argument('--height', type=int, default=None,
                            help='scales the image before computing SSIM')
        args = parser.parse_args()

        if args.width and args.height:
            size = (args.width, args.height)
        else:
            size = None

        if not args.cw:
            gaussian_kernel_sigma = 1.5
            gaussian_kernel_width = 11
            gaussian_kernel_1d = get_gaussian_kernel(
                gaussian_kernel_width, gaussian_kernel_sigma)

q270125 · SSIM.ssim_value · partition: test · language: python

    def ssim_value(self, target):
        """Compute the SSIM value from the reference image to the target image.

        Args:
            target (str or PIL.Image): Input image to compare the reference
                image to. This may be a PIL Image object or, to save time, an
                SSIMImage object (e.g. the img member of another SSIM object).

        Returns:
            Computed SSIM float value.
        """
        # Performance boost if handed a compatible SSIMImage object.
        if not isinstance(target, SSIMImage) \
                or not np.array_equal(self.gaussian_kernel_1d,
                                      target.gaussian_kernel_1d):
            target = SSIMImage(target, self.gaussian_kernel_1d, self.img.size)

        img_mat_12 = self.img.img_gray * target.img_gray
        img_mat_sigma_12 = convolve_gaussian_2d(

q270126 · compute_ssim · partition: test · language: python

    def compute_ssim(image1, image2, gaussian_kernel_sigma=1.5,
                     gaussian_kernel_width=11):
        """Computes SSIM.

        Args:
            image1: First PIL Image object to compare.
            image2: Second PIL Image object to compare.

        Returns:
            SSIM float value.
        """

q270127 · SyncObj.destroy · partition: test · language: python

    def destroy(self):
        """
        Correctly destroy SyncObj. Stop autoTickThread, close connections, etc.
        """

q270128 · SyncObj.setCodeVersion · partition: test · language: python

    def setCodeVersion(self, newVersion, callback = None):
        """Switch to a new code version on all cluster nodes. You
        should ensure that cluster nodes are updated, otherwise they
        won't be able to apply commands.

        :param newVersion: new code version
        :type newVersion: int
        :param callback: will be called on success or failure
        :type callback: function(`FAIL_REASON <#pysyncobj.FAIL_REASON>`_, None)
        """
        assert isinstance(newVersion, int)

        if newVersion > self.__selfCodeVersion:

q270129 · SyncObj.getStatus · partition: test · language: python

    def getStatus(self):
        """Dumps different debug info about the cluster to a dict and returns it"""
        status = {}
        status['version'] = VERSION
        status['revision'] = REVISION
        status['self'] = self.__selfNode
        status['state'] = self.__raftState
        status['leader'] = self.__raftLeader
        status['partner_nodes_count'] = len(self.__otherNodes)
        for node in self.__otherNodes:
            status['partner_node_status_server_' + node.id] = \
                2 if node in self.__connectedNodes else 0
        status['readonly_nodes_count'] = len(self.__readonlyNodes)
        for node in self.__readonlyNodes:
            status['readonly_node_status_server_' + node.id] = \
                2 if node in self.__connectedNodes else 0
        status['log_len'] = len(self.__raftLog)
        status['last_applied'] = self.__raftLastApplied
        status['commit_idx'] = self.__raftCommitIndex
        status['raft_term'] = self.__raftCurrentTerm

q270130 · SyncObj.printStatus · partition: test · language: python

    def printStatus(self):
        """Dumps different debug info about cluster to default logger"""
        status = self.getStatus()

q270131 · TCPTransport._connToNode · partition: test · language: python

    def _connToNode(self, conn):
        """
        Find the node to which a connection belongs.

        :param conn: connection object
        :type conn: TcpConnection
        :returns corresponding node or None if the node cannot be found
        :rtype Node or None

q270132 · TCPTransport._maybeBind · partition: test · language: python

    def _maybeBind(self):
        """
        Bind the server unless it is already bound, this is a read-only node,
        or the last attempt was too recent.

        :raises TransportNotReadyError if the bind attempt fails
        """
        if self._ready or self._selfIsReadonlyNode or \
                time.time() < self._lastBindAttemptTime + self._syncObj.conf.bindRetryTime:
            return
        self._lastBindAttemptTime = time.time()
        try:
            self._server.bind()
        except Exception as e:
            self._bindAttempts += 1

q270133 · TCPTransport._onNewIncomingConnection · partition: test · language: python

    def _onNewIncomingConnection(self, conn):
        """
        Callback for connections initiated by the other side

        :param conn: connection object
        :type conn: TcpConnection
        """
        self._unknownConnections.add(conn)
        encryptor = self._syncObj.encryptor
        if encryptor:
            conn.encryptor = encryptor

q270134 · TCPTransport._onIncomingMessageReceived · partition: test · language: python

    def _onIncomingMessageReceived(self, conn, message):
        """
        Callback for initial messages on incoming connections. Handles
        encryption, utility messages, and association of the connection with a
        Node. Once this initial setup is done, the relevant connected callback
        is executed, and further messages are deferred to the
        onMessageReceived callback.

        :param conn: connection object
        :type conn: TcpConnection
        :param message: received message
        :type message: any
        """
        if self._syncObj.encryptor and not conn.sendRandKey:
            conn.sendRandKey = message
            conn.recvRandKey = os.urandom(32)
            conn.send(conn.recvRandKey)
            return

        # Utility messages
        if isinstance(message, list):
            done = False
            try:
                if message[0] == 'status':
                    conn.send(self._syncObj.getStatus())
                    done = True
                elif message[0] == 'add':
                    self._syncObj.addNodeToCluster(
                        message[1],
                        callback = functools.partial(self._utilityCallback, conn = conn,
                                                     cmd = 'ADD', arg = message[1]))
                    done = True
                elif message[0] == 'remove':
                    if message[1] == self._selfNode.address:
                        conn.send('FAIL REMOVE ' + message[1])
                    else:
                        self._syncObj.removeNodeFromCluster(
                            message[1],
                            callback = functools.partial(self._utilityCallback, conn = conn,
                                                         cmd = 'REMOVE', arg = message[1]))
                    done = True
                elif message[0] == 'set_version':
                    self._syncObj.setCodeVersion(
                        message[1],
                        callback = functools.partial(self._utilityCallback, conn = conn,
                                                     cmd = 'SET_VERSION', arg = str(message[1])))
                    done = True
            except Exception as e:
                conn.send(str(e))

q270135 · TCPTransport._utilityCallback · partition: test · language: python

    def _utilityCallback(self, res, err, conn, cmd, arg):
        """
        Callback for the utility messages

        :param res: result of the command
        :param err: error code (one of pysyncobj.config.FAIL_REASON)
        :param conn: utility connection
        :param cmd: command
        :param arg: command arguments

q270136 · TCPTransport._shouldConnect · partition: test · language: python

    def _shouldConnect(self, node):
        """
        Check whether this node should initiate a connection to another node

        :param node: the other node
        :type node: Node
        """
        return isinstance(node, TCPNode) and

q270137 · TCPTransport._connectIfNecessarySingle · partition: test · language: python

    def _connectIfNecessarySingle(self, node):
        """
        Connect to a node if necessary.

        :param node: node to connect to
        :type node: Node
        """
        if node in self._connections and \
                self._connections[node].state != CONNECTION_STATE.DISCONNECTED:
            return True
        if not self._shouldConnect(node):
            return False
        assert node in self._connections
        # Since we "should connect" to this node, there

q270138 · TCPTransport._onOutgoingConnected · partition: test · language: python

    def _onOutgoingConnected(self, conn):
        """
        Callback for when a new connection from this to another node is
        established. Handles encryption and informs the other node which node
        this is. If encryption is disabled, this triggers the onNodeConnected
        callback and messages are deferred to the onMessageReceived callback.
        If encryption is enabled, the first message is handled by
        _onOutgoingMessageReceived.

        :param conn: connection object
        :type conn: TcpConnection
        """
        if self._syncObj.encryptor:
            # So we can process the sendRandKey
            conn.setOnMessageReceivedCallback(
                functools.partial(self._onOutgoingMessageReceived, conn))

q270139 · TCPTransport._onOutgoingMessageReceived · partition: test · language: python

    def _onOutgoingMessageReceived(self, conn, message):
        """
        Callback for receiving a message on a new outgoing connection. Used
        only if encryption is enabled to exchange the random keys.
        Once the key exchange is done, this triggers the onNodeConnected
        callback, and further messages are deferred to the onMessageReceived
        callback.

        :param conn: connection object
        :type conn: TcpConnection
        :param message: received message
        :type message: any
        """
        if not conn.sendRandKey:

q270140 · TCPTransport._onDisconnected · partition: test · language: python

    def _onDisconnected(self, conn):
        """
        Callback for when a connection is terminated or considered dead.
        Initiates a reconnect if necessary.

        :param conn: connection object
        :type conn: TcpConnection
        """
        self._unknownConnections.discard(conn)
        node = self._connToNode(conn)
        if node is not None:

q270141 · TCPTransport.addNode · partition: test · language: python

    def addNode(self, node):
        """
        Add a node to the network

        :param node: node to add
        :type node: TCPNode
        """
        self._nodes.add(node)
        self._nodeAddrToNode[node.address] = node
        if self._shouldConnect(node):
            conn = TcpConnection(poller = self._syncObj._poller,
                                 timeout = self._syncObj.conf.connectionTimeout,
                                 sendBufferSize = self._syncObj.conf.sendBufferSize,
                                 recvBufferSize = self._syncObj.conf.recvBufferSize)

q270142 · TCPTransport.dropNode · partition: test · language: python

    def dropNode(self, node):
        """
        Drop a node from the network

        :param node: node to drop
        :type node: Node
        """
        conn = self._connections.pop(node, None)
        if conn is not None:
            # Calling conn.disconnect() immediately triggers the onDisconnected
            # callback if the connection isn't already disconnected, so this is
            # necessary to prevent the automatic reconnect.
            self._preventConnectNodes.add(node)

q270143 · TCPTransport.send · partition: test · language: python

    def send(self, node, message):
        """
        Send a message to a node. Returns False if the connection appears to
        be dead either before or after actually trying to send the message.

        :param node: target node
        :type node: Node
        :param message: message
        :type message: any
        :returns success
        :rtype bool

q270144 · TCPTransport.destroy · partition: test · language: python

    def destroy(self):
        """
        Destroy this transport
        """
        self.setOnMessageReceivedCallback(None)
        self.setOnNodeConnectedCallback(None)
        self.setOnNodeDisconnectedCallback(None)
        self.setOnReadonlyNodeConnectedCallback(None)
        self.setOnReadonlyNodeDisconnectedCallback(None)
        for node in self._nodes | self._readonlyNodes:
            self.dropNode(node)

q270145 · ReplQueue.put · partition: test · language: python

    def put(self, item):
        """Put an item into the queue.
        True - if item placed in queue.
        False - if queue is full and item can not be placed."""
        if self.__maxsize

q270146 · ReplPriorityQueue.put · partition: test · language: python

    def put(self, item):
        """Put an item into the queue.
        Items should be comparable, eg. tuples.
        True - if item placed in queue.
        False - if queue is full and item can not be placed."""

q270147 · ReplPriorityQueue.get · partition: test · language: python

    def get(self, default=None):
        """Extract the smallest item from queue.
        Return default if queue is empty."""
        if not self.__data:

q270148 · ReplLockManager.tryAcquire · partition: test · language: python

    def tryAcquire(self, lockID, callback=None, sync=False, timeout=None):
        """Attempt to acquire lock.

        :param lockID: unique lock identifier.
        :type lockID: str
        :param sync: True - to wait until lock is acquired or failed to acquire.
        :type sync: bool
        :param callback: if sync is False - callback will be called with
            operation result.
        :type callback: func(opResult, error)

q270149 · ReplLockManager.isAcquired · partition: test · language: python

    def isAcquired(self, lockID):
        """Check if lock is acquired by ourselves.

        :param lockID: unique lock identifier.
        :type lockID: str

q270150 · ReplLockManager.release · partition: test · language: python

    def release(self, lockID, callback=None, sync=False, timeout=None):
        """
        Release previously-acquired lock.

        :param lockID: unique lock identifier.
        :type lockID: str
        :param sync: True - to wait until lock is released or failed to release.
        :type sync: bool
        :param callback: if sync is False - callback will be called with
            operation result.

q270151 · check · partition: test · language: python

    def check(func):
        """
        Decorator which wraps checks and returns an error response on failure.
        """
        def wrapped(*args, **kwargs):
            check_name = func.__name__
            arg_name = None
            if args:
                arg_name = args[0]
            try:
                if arg_name:
                    logger.debug("Checking '%s' for '%s'", check_name, arg_name)
                else:
                    logger.debug("Checking '%s'", check_name)
                response = func(*args, **kwargs)
            except Exception as e:
                message = str(e)
                response = {
                    "ok": False,
                    "error": message,
                    "stacktrace": traceback.format_exc(),
                }
            # The check contains several individual checks (e.g., one per
            # database). Preface the results by name.
            if arg_name:

q270152 · token_required · partition: test · language: python

    def token_required(view_func):
        """
        Decorator which ensures that one of the WATCHMAN_TOKENS is provided if set.

        WATCHMAN_TOKEN_NAME can also be set if the token GET parameter must be
        customized.
        """
        def _parse_auth_header(auth_header):
            """
            Parse the `Authorization` header

            Expected format: `WATCHMAN-TOKEN Token="ABC123"`
            """
            # TODO: Figure out full set of allowed characters
            # http://stackoverflow.com/questions/19028068/illegal-characters-in-http-headers
            # https://www.w3.org/Protocols/rfc2616/rfc2616-sec2.html#sec2.2
            # https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2
            reg = re.compile(r'(\w+)[=] ?"?([\w-]+)"?')
            header_dict = dict(reg.findall(auth_header))
            return header_dict['Token']

        def _get_passed_token(request):
            """
            Try to get the passed token, starting with the header and falling
            back to the `GET` param
            """
            try:
                auth_header = request.META['HTTP_AUTHORIZATION']
                token = _parse_auth_header(auth_header)
            except KeyError:
                token = request.GET.get(settings.WATCHMAN_TOKEN_NAME)
            return token

q270153 · set_hosts · partition: test · language: python

    def set_hosts(hosts, use_ssl=False, ssl_cert_path=None):
        """
        Sets the Elasticsearch hosts to use

        Args:
            hosts (str): A single hostname or URL, or list of hostnames or URLs
            use_ssl (bool): Use an HTTPS connection to the server
            ssl_cert_path (str): Path to the certificate chain
        """
        if type(hosts) != list:
            hosts = [hosts]
        conn_params = {
            "hosts": hosts,
            "timeout": 20
        }
        if use_ssl:
            conn_params['use_ssl']

q270154 · create_indexes · partition: test · language: python

    def create_indexes(names, settings=None):
        """
        Create Elasticsearch indexes

        Args:
            names (list): A list of index names
            settings (dict): Index settings
        """
        for name in names:
            index = Index(name)
            try:
                if not index.exists():
                    logger.debug("Creating Elasticsearch index: {0}".format(name))
                    if settings is None:
                        index.settings(number_of_shards=1,

q270155 · migrate_indexes · partition: test · language: python

    def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):
        """
        Updates index mappings

        Args:
            aggregate_indexes (list): A list of aggregate index names
            forensic_indexes (list): A list of forensic index names
        """
        version = 2
        if aggregate_indexes is None:
            aggregate_indexes = []
        if forensic_indexes is None:
            forensic_indexes = []
        for aggregate_index_name in aggregate_indexes:
            if not Index(aggregate_index_name).exists():
                continue
            aggregate_index = Index(aggregate_index_name)
            doc = "doc"
            fo_field = "published_policy.fo"
            fo = "fo"
            fo_mapping = aggregate_index.get_field_mapping(fields=[fo_field])
            fo_mapping = fo_mapping[list(fo_mapping.keys())[0]]["mappings"]
            if doc not in fo_mapping:
                continue
            fo_mapping = fo_mapping[doc][fo_field]["mapping"][fo]
            fo_type = fo_mapping["type"]
            if fo_type == "long":
                new_index_name = "{0}-v{1}".format(aggregate_index_name, version)
                body = {"properties": {"published_policy.fo": {
                    "type": "text",
                    "fields": {

q270156 · KafkaClient.strip_metadata · partition: test · language: python

    def strip_metadata(report):
        """
        Duplicates org_name, org_email and report_id into the JSON root
        and removes the report_metadata key to bring it more in line with
        Elastic output.

q270157 · KafkaClient.save_aggregate_reports_to_kafka · partition: test · language: python

    def save_aggregate_reports_to_kafka(self, aggregate_reports, aggregate_topic):
        """
        Saves aggregate DMARC reports to Kafka

        Args:
            aggregate_reports (list): A list of aggregate report dictionaries
                to save to Kafka
            aggregate_topic (str): The name of the Kafka topic
        """
        if (type(aggregate_reports) == dict or
                type(aggregate_reports) == OrderedDict):
            aggregate_reports = [aggregate_reports]

        if len(aggregate_reports) < 1:
            return

        for report in aggregate_reports:
            report['date_range'] = self.generate_daterange(report)
            report = self.strip_metadata(report)

            for slice in report['records']:
                slice['date_range'] = report['date_range']
                slice['org_name'] = report['org_name']
                slice['org_email'] = report['org_email']
                slice['policy_published'] = report['policy_published']
                slice['report_id'] = report['report_id']
                logger.debug("Sending slice.")

q270158 · extract_xml · partition: test · language: python

    def extract_xml(input_):
        """
        Extracts xml from a zip or gzip file at the given path, file-like
        object, or bytes.

        Args:
            input_: A path to a file, a file like object, or bytes

        Returns:
            str: The extracted XML
        """
        if type(input_) == str:
            file_object = open(input_, "rb")
        elif type(input_) == bytes:
            file_object = BytesIO(input_)
        else:
            file_object = input_
        try:
            header = file_object.read(6)
            file_object.seek(0)
            if header.startswith(MAGIC_ZIP):
                _zip = zipfile.ZipFile(file_object)
                xml = _zip.open(_zip.namelist()[0]).read().decode()
            elif header.startswith(MAGIC_GZIP):
                xml = GzipFile(fileobj=file_object).read().decode()
            elif header.startswith(MAGIC_XML):

q270159 · parse_aggregate_report_file · partition: test · language: python

    def parse_aggregate_report_file(_input, nameservers=None, dns_timeout=2.0,
                                    parallel=False):
        """Parses a file at the given path, a file-like object, or bytes as an
        aggregate DMARC report

        Args:
            _input: A path to a file, a file like object, or bytes
            nameservers (list): A list of one or more nameservers to use

q270160 · parsed_forensic_reports_to_csv · partition: test · language: python

    def parsed_forensic_reports_to_csv(reports):
        """
        Converts one or more parsed forensic reports to flat CSV format,
        including headers

        Args:
            reports: A parsed forensic report or list of parsed forensic reports

        Returns:
            str: Parsed forensic report data in flat CSV format, including
                headers
        """
        fields = ["feedback_type", "user_agent", "version",
                  "original_envelope_id", "original_mail_from",
                  "original_rcpt_to", "arrival_date", "arrival_date_utc",
                  "subject", "message_id", "authentication_results",
                  "dkim_domain", "source_ip_address", "source_country",
                  "source_reverse_dns", "source_base_domain",
                  "delivery_result", "auth_failure", "reported_domain",
                  "authentication_mechanisms", "sample_headers_only"]

        if type(reports) == OrderedDict:
            reports = [reports]
        csv_file = StringIO()
        csv_writer = DictWriter(csv_file, fieldnames=fields)
        csv_writer.writeheader()
        for report in

q270161 · parse_report_file · partition: test · language: python

    def parse_report_file(input_, nameservers=None, dns_timeout=2.0,
                          strip_attachment_payloads=False, parallel=False):
        """Parses a DMARC aggregate or forensic file at the given path, a
        file-like object, or bytes

        Args:
            input_: A path to a file, a file like object, or bytes
            nameservers (list): A list of one or more nameservers to use
                (Cloudflare's public DNS resolvers by default)
            dns_timeout (float): Sets the DNS timeout in seconds
            strip_attachment_payloads (bool): Remove attachment payloads from
                forensic report results
            parallel (bool): Parallel processing

        Returns:
            OrderedDict: The parsed DMARC report
        """
        if type(input_) == str:
            file_object = open(input_, "rb")
        elif type(input_) == bytes:
            file_object = BytesIO(input_)
        else:
            file_object = input_
        content = file_object.read()
        try:
            report = parse_aggregate_report_file(content,
                                                 nameservers=nameservers,
                                                 dns_timeout=dns_timeout,

q270162 · get_imap_capabilities · partition: test · language: python

    def get_imap_capabilities(server):
        """
        Returns a list of an IMAP server's capabilities

        Args:
            server (imapclient.IMAPClient): An instance of imapclient.IMAPClient

        Returns (list): A list of capabilities
        """
        capabilities = list(map(str, list(server.capabilities())))

q270163 · save_output · partition: test · language: python

    def save_output(results, output_directory="output"):
        """
        Save report data in the given directory

        Args:
            results (OrderedDict): Parsing results
            output_directory: The path to the directory to save in
        """
        aggregate_reports = results["aggregate_reports"]
        forensic_reports = results["forensic_reports"]

        if os.path.exists(output_directory):
            if not os.path.isdir(output_directory):
                raise ValueError("{0} is not a directory".format(output_directory))
        else:
            os.makedirs(output_directory)

        with open("{0}".format(os.path.join(output_directory, "aggregate.json")),
                  "w", newline="\n", encoding="utf-8") as agg_json:
            agg_json.write(json.dumps(aggregate_reports, ensure_ascii=False,
                                      indent=2))

        with open("{0}".format(os.path.join(output_directory, "aggregate.csv")),
                  "w", newline="\n", encoding="utf-8") as agg_csv:
            csv = parsed_aggregate_reports_to_csv(aggregate_reports)
            agg_csv.write(csv)

        with open("{0}".format(os.path.join(output_directory, "forensic.json")),
                  "w", newline="\n", encoding="utf-8") as for_json:
            for_json.write(json.dumps(forensic_reports, ensure_ascii=False,
                                      indent=2))

        with open("{0}".format(os.path.join(output_directory, "forensic.csv")),
                  "w", newline="\n", encoding="utf-8") as for_csv:
            csv = parsed_forensic_reports_to_csv(forensic_reports)
            for_csv.write(csv)

        samples_directory =

q270164 · get_report_zip · partition: test · language: python

    def get_report_zip(results):
        """
        Creates a zip file of parsed report output

        Args:
            results (OrderedDict): The parsed results

        Returns:
            bytes: zip file bytes
        """
        def add_subdir(root_path, subdir):
            subdir_path = os.path.join(root_path, subdir)
            for subdir_root, subdir_dirs, subdir_files in os.walk(subdir_path):
                for subdir_file in subdir_files:
                    subdir_file_path = os.path.join(root_path, subdir, subdir_file)
                    if os.path.isfile(subdir_file_path):
                        rel_path = os.path.relpath(subdir_root, subdir_file_path)
                        subdir_arc_name = os.path.join(rel_path, subdir_file)
                        zip_file.write(subdir_file_path, subdir_arc_name)
                for subdir in subdir_dirs:
                    add_subdir(subdir_path, subdir)

        storage = BytesIO()
        tmp_dir = tempfile.mkdtemp()
        try:
            save_output(results, tmp_dir)
            with zipfile.ZipFile(storage, 'w', zipfile.ZIP_DEFLATED) as zip_file:
                for root, dirs, files in os.walk(tmp_dir):
                    for file in files:

q270165 · email_results · partition: test · language: python

    def email_results(results, host, mail_from, mail_to, port=0, ssl=False,
                      user=None, password=None, subject=None,
                      attachment_filename=None, message=None, ssl_context=None):
        """
        Emails parsing results as a zip file

        Args:
            results (OrderedDict): Parsing results
            host: Mail server hostname or IP address
            mail_from: The value of the message from header
            mail_to (list): A list of addresses to mail to
            port (int): Port to use
            ssl (bool): Require an SSL connection from the start
            user: An optional username
            password: An optional password
            subject: Overrides the default message subject
            attachment_filename: Override the default attachment filename
            message: Override the default plain text body
            ssl_context: SSL context options
        """
        logging.debug("Emailing report to: {0}".format(",".join(mail_to)))
        date_string = datetime.now().strftime("%Y-%m-%d")
        if attachment_filename:
            if not attachment_filename.lower().endswith(".zip"):
                attachment_filename += ".zip"
            filename = attachment_filename
        else:
            filename = "DMARC-{0}.zip".format(date_string)

        assert isinstance(mail_to, list)

        msg = MIMEMultipart()
        msg['From'] = mail_from
        msg['To'] = ", ".join(mail_to)
        msg['Date'] = email.utils.formatdate(localtime=True)
        msg['Subject'] = subject or "DMARC

q270166 · HECClient.save_aggregate_reports_to_splunk · partition: test · language: python

    def save_aggregate_reports_to_splunk(self, aggregate_reports):
        """
        Saves aggregate DMARC reports to Splunk

        Args:
            aggregate_reports: A list of aggregate report dictionaries
                to save in Splunk
        """
        logger.debug("Saving aggregate reports to Splunk")
        if type(aggregate_reports) == dict:
            aggregate_reports = [aggregate_reports]

        if len(aggregate_reports) < 1:
            return

        data = self._common_data.copy()
        json_str = ""
        for report in aggregate_reports:
            for record in report["records"]:
                new_report = dict()
                for metadata in report["report_metadata"]:
                    new_report[metadata] = report["report_metadata"][metadata]
                new_report["published_policy"] = report["policy_published"]
                new_report["source_ip_address"] = record["source"]["ip_address"]
                new_report["source_country"] = record["source"]["country"]
                new_report["source_reverse_dns"] = record["source"]["reverse_dns"]
                new_report["source_base_domain"] = record["source"]["base_domain"]
                new_report["message_count"] = record["count"]
                new_report["disposition"] = record["policy_evaluated"]["disposition"]
                new_report["spf_aligned"] = record["alignment"]["spf"]
                new_report["dkim_aligned"] = record["alignment"]["dkim"]
                new_report["passed_dmarc"] = record["alignment"]["dmarc"]
                new_report["header_from"] = record["identifiers"]["header_from"]
                new_report["envelope_from"] = record["identifiers"]["envelope_from"]
                if "dkim" in record["auth_results"]:
                    new_report["dkim_results"] = record["auth_results"][

q270167 · HECClient.save_forensic_reports_to_splunk · partition: test · language: python

    def save_forensic_reports_to_splunk(self, forensic_reports):
        """
        Saves forensic DMARC reports to Splunk

        Args:
            forensic_reports (list): A list of forensic report dictionaries
                to save in Splunk
        """
        logger.debug("Saving forensic reports to Splunk")
        if type(forensic_reports) == dict:
            forensic_reports = [forensic_reports]

        if len(forensic_reports) < 1:
            return

        json_str = ""
        for report in forensic_reports:

q270168 · decode_base64 · partition: test · language: python

    def decode_base64(data):
        """
        Decodes a base64 string, with padding being optional

        Args:
            data: A base64 encoded string

        Returns:

q270169 · get_base_domain · partition: test · language: python

    def get_base_domain(domain, use_fresh_psl=False):
        """
        Gets the base domain name for the given domain

        .. note::
            Results are based on a list of public domain suffixes at
            https://publicsuffix.org/list/public_suffix_list.dat.

        Args:
            domain (str): A domain or subdomain
            use_fresh_psl (bool): Download a fresh Public Suffix List

        Returns:
            str: The base domain of the given domain
        """
        psl_path = os.path.join(tempdir, "public_suffix_list.dat")

        def download_psl():
            url = "https://publicsuffix.org/list/public_suffix_list.dat"
            # Use a browser-like user agent string to bypass some proxy blocks

q270170 · get_reverse_dns · partition: test · language: python

    def get_reverse_dns(ip_address, cache=None, nameservers=None, timeout=2.0):
        """
        Resolves an IP address to a hostname using a reverse DNS query

        Args:
            ip_address (str): The IP address to resolve
            cache (ExpiringDict): Cache storage
            nameservers (list): A list of one or more nameservers to use
                (Cloudflare's public DNS resolvers

q270171 · human_timestamp_to_datetime · partition: test · language: python

    def human_timestamp_to_datetime(human_timestamp, to_utc=False):
        """
        Converts a human-readable timestamp into a Python ``DateTime`` object

        Args:
            human_timestamp (str): A timestamp string
            to_utc (bool): Convert the timestamp to UTC

        Returns:
            DateTime: The converted timestamp

q270172 · get_ip_address_country · partition: test · language: python

    def get_ip_address_country(ip_address, parallel=False):
        """
        Uses the MaxMind Geolite2 Country database to return the ISO code for
        the country associated with the given IPv4 or IPv6 address

        Args:
            ip_address (str): The IP address to query for
            parallel (bool): Parallel processing

        Returns:
            str: An ISO country code associated with the given IP address
        """
        def download_country_database(location="GeoLite2-Country.mmdb"):
            """Downloads the MaxMind Geolite2 Country database

            Args:
                location (str): Local location for the database file
            """
            if parallel:
                logging.warning("Cannot download GeoIP database in parallel mode")
                return
            url = "https://geolite.maxmind.com/download/geoip/database/" \
                  "GeoLite2-Country.tar.gz"
            # Use a browser-like user agent string to bypass some proxy blocks
            headers = {"User-Agent": USER_AGENT}
            original_filename = "GeoLite2-Country.mmdb"
            try:
                response = requests.get(url, headers=headers)
                response.raise_for_status()
                tar_bytes = response.content

q270173 · get_ip_address_info · partition: test · language: python

    def get_ip_address_info(ip_address, cache=None, nameservers=None,
                            timeout=2.0, parallel=False):
        """
        Returns reverse DNS and country information for the given IP address

        Args:
            ip_address (str): The IP address to check
            cache (ExpiringDict): Cache storage
            nameservers (list): A list of one or more nameservers to use
                (Cloudflare's public DNS resolvers by default)
            timeout (float): Sets the DNS timeout in seconds
            parallel (bool): parallel processing

        Returns:

q270174 · convert_outlook_msg · partition: test · language: python

    def convert_outlook_msg(msg_bytes):
        """
        Uses the ``msgconvert`` Perl utility to convert an Outlook MSG file to
        standard RFC 822 format

        Args:
            msg_bytes (bytes): the content of the .msg file

        Returns:
            An RFC 822 string
        """
        if not is_outlook_msg(msg_bytes):
            raise ValueError("The supplied bytes are not an Outlook MSG file")
        orig_dir = os.getcwd()
        tmp_dir = tempfile.mkdtemp()
        os.chdir(tmp_dir)
        with open("sample.msg", "wb") as msg_file:
            msg_file.write(msg_bytes)
        try:
            subprocess.check_call(["msgconvert", "sample.msg"],
                                  stdout=null_file, stderr=null_file)

q270175 · _str_to_list · partition: test · language: python

    def _str_to_list(s):
        """Converts a comma separated string to a list"""
        _list =

q270176 · cli_parse · partition: test · language: python

    def cli_parse(file_path, sa, nameservers, dns_timeout, parallel=False):
        """Separated this function for multiprocessing"""
        try:
            file_results = parse_report_file(file_path,
                                             nameservers=nameservers,
                                             dns_timeout=dns_timeout,

q270177 · Client.drain · partition: test · language: python

    def drain(self, sid=None):
        """
        Drain will put a connection into a drain state. All subscriptions will
        immediately be put into a drain state. Upon completion, the publishers
        will be drained and can not publish any additional messages. Upon
        draining of the publishers, the connection will be closed. Use the
        `closed_cb' option to know when the connection has moved from draining
        to closed.

        If a sid is passed, just the subscription with that sid will be drained
        without closing the connection.
        """
        if self.is_draining:
            return

        if self.is_closed:
            raise ErrConnectionClosed

        if self.is_connecting or self.is_reconnecting:
            raise ErrConnectionReconnecting

        if sid is not None:
            return self._drain_sub(sid)

        # Start draining the subscriptions
        self._status = Client.DRAINING_SUBS

        drain_tasks = []
        for ssid, sub in self._subs.items():
            task = self._drain_sub(ssid)
            drain_tasks.append(task)

        drain_is_done = asyncio.gather(*drain_tasks)

q270178 · Client.publish · partition: test · language: python

    def publish(self, subject, payload):
        """
        Sends a PUB command to the server on the specified subject.

          ->> PUB hello 5
          ->> MSG_PAYLOAD: world
          <<- MSG hello 2 5

        """
        if self.is_closed:
            raise ErrConnectionClosed
        if self.is_draining_pubs:

q270179 · Client.publish_request · partition: test · language: python

    def publish_request(self, subject, reply, payload):
        """
        Publishes a message tagging it with a reply subscription which can be
        used by those receiving the message to respond.

          ->> PUB hello _INBOX.2007314fe0fcb2cdc2a2914c1 5
          ->> MSG_PAYLOAD: world
          <<- MSG hello 2 _INBOX.2007314fe0fcb2cdc2a2914c1 5

        """
        if self.is_closed:
            raise ErrConnectionClosed
        if self.is_draining_pubs:

q270180 · Client._publish · partition: test · language: python

    def _publish(self, subject, reply, payload, payload_size):
        """
        Sends PUB command to the NATS server.
        """
        if subject == "":
            # Avoid sending messages with empty replies.
            raise ErrBadSubject

        payload_size_bytes = ("%d" % payload_size).encode()
        pub_cmd = b''.join([PUB_OP, _SPC_, subject.encode(), _SPC_, reply,
                            _SPC_, payload_size_bytes, _CRLF_, payload, _CRLF_])

q270181 · Client.subscribe_async · partition: test · language: python

    def subscribe_async(self, subject, **kwargs):
        """
        Sets the subscription to use a task per message to be processed.

        .. deprecated:: 7.0
           Will be removed in 9.0.
        """

q270182 · Client.unsubscribe · partition: test · language: python

    def unsubscribe(self, ssid, max_msgs=0):
        """
        Takes a subscription sequence id and removes the subscription
        from the client, optionally after receiving more than max_msgs.
        """
        if self.is_closed:
            raise ErrConnectionClosed
        if self.is_draining:
            raise ErrConnectionDraining

q270183 · Client.flush · partition: test · language: python

    def flush(self, timeout=60):
        """
        Sends a ping to the server expecting a pong back ensuring
        what we have written so far has made it to the server and
        also enabling measuring of roundtrip time.
        In case a pong is not returned within the allowed timeout,
        then it will raise ErrTimeout.
        """
        if timeout <= 0:
            raise ErrBadTimeout

        if self.is_closed:
            raise ErrConnectionClosed

q270184 · Client._select_next_server · partition: test · language: python

    def _select_next_server(self):
        """
        Looks up in the server pool for an available server
        and attempts to connect.
        """
        while True:
            if len(self._server_pool) == 0:
                self._current_server = None
                raise ErrNoServers

            now = time.monotonic()
            s = self._server_pool.pop(0)
            if self.options["max_reconnect_attempts"] > 0:
                if s.reconnects > self.options["max_reconnect_attempts"]:
                    # Discard server since already tried to reconnect too many times
                    continue

            # Not yet exceeded max_reconnect_attempts so can still use
            # this server in the future.
            self._server_pool.append(s)
            if s.last_attempt is not None and \
                    now < s.last_attempt + self.options["reconnect_time_wait"]:
                # Backoff connecting to server if we attempted recently.

q270185 · Client._process_err · partition: test · language: python

    def _process_err(self, err_msg):
        """
        Processes the raw error message sent by the server
        and closes the connection with the current server.
        """
        if STALE_CONNECTION in err_msg:
            yield from self._process_op_err(ErrStaleConnection)
            return

        if AUTHORIZATION_VIOLATION in err_msg:
            self._err = ErrAuthorization
        else:
            m = b'nats: ' + err_msg[0]
            self._err = NatsError(m.decode())

        do_cbs = False

q270186 · Client._process_op_err · partition: test · language: python

    def _process_op_err(self, e):
        """
        Process errors which occurred while reading or parsing the protocol.
        If allow_reconnect is enabled it will try to switch the server to
        which it is currently connected, otherwise it will disconnect.
        """
        if self.is_connecting or self.is_closed or self.is_reconnecting:
            return

        if self.options["allow_reconnect"] and self.is_connected:
            self._status = Client.RECONNECTING
            self._ps.reset()

            if self._reconnection_task is not None and \
                    not self._reconnection_task.cancelled():
                # Cancel the previous task

q270187 · Client._connect_command · partition: test · language: python

    def _connect_command(self):
        '''
        Generates a JSON string with the params to be used
        when sending CONNECT to the server.

          ->> CONNECT {"lang": "python3"}

        '''
        options = {
            "verbose": self.options["verbose"],
            "pedantic": self.options["pedantic"],
            "lang": __lang__,
            "version": __version__,
            "protocol": PROTOCOL
        }
        if "auth_required" in self._server_info:
            if self._server_info["auth_required"]:
                # In case there is no password, consider sending
                # a token instead.
                if self.options["user"] is not None and \
                        self.options["password"] is not None:
                    options["user"] = self.options["user"]
                    options["pass"] = self.options["password"]
                elif self.options["token"] is not None:
                    options["auth_token"] = self.options["token"]
                elif self._current_server.uri.password is None:
                    options["auth_token"] =

q270188 · Client._process_pong · partition: test · language: python

    def _process_pong(self):
        """
        Process PONG sent by server.
        """
        if len(self._pongs) > 0:
            future = self._pongs.pop(0)

q270189 · Client._process_msg · partition: test · language: python

    def _process_msg(self, sid, subject, reply, data):
        """
        Process MSG sent by server.
        """
        payload_size = len(data)
        self.stats['in_msgs'] += 1
        self.stats['in_bytes'] += payload_size

        sub = self._subs.get(sid)
        if sub is None:
            # Skip in case no subscription present.
            return

        sub.received += 1
        if sub.max_msgs > 0 and sub.received >= sub.max_msgs:
            # Enough messages, so we can throw away the subscription now.
            self._subs.pop(sid, None)

        msg = self._build_message(subject, reply, data)

        # Check if it is an old style request.
        if sub.future is not None:
            if sub.future.cancelled():
                # Already gave up, nothing to do.
                return
            sub.future.set_result(msg)
            return

        # Let subscription wait_for_msgs coroutine process the messages,
        # but in case sending to the subscription task would block,
        # then consider it to be a slow consumer and drop the message.
        try:
            sub.pending_size +=

q270190 · Client._process_info · partition: test · language: python

    def _process_info(self, info):
        """
        Process INFO lines sent by the server to reconfigure client
        with latest updates from cluster to enable server discovery.
        """
        if 'connect_urls' in info:
            if info['connect_urls']:
                connect_urls = []
                for connect_url in info['connect_urls']:
                    uri = urlparse("nats://%s" % connect_url)
                    srv = Srv(uri)
                    srv.discovered = True

                    # Filter for any similar server in the server pool already.
                    should_add = True
                    for s in self._server_pool:

q270191 · Client._process_connect_init · partition: test · language: python

    def _process_connect_init(self):
        """
        Process INFO received from the server and CONNECT to the server with
        authentication. It is also responsible for setting up the reading and
        ping interval tasks from the client.
        """
        self._status = Client.CONNECTING

        connection_completed = self._io_reader.readline()
        info_line = yield from asyncio.wait_for(connection_completed,
                                                self.options["connect_timeout"])
        if INFO_OP not in info_line:
            raise NatsError(
                "nats: empty response from server when expecting INFO message")

        _, info = info_line.split(INFO_OP + _SPC_, 1)

        try:
            srv_info = json.loads(info.decode())
        except:
            raise NatsError("nats: info message, json parse error")

        self._process_info(srv_info)
        self._server_info = srv_info
        if 'max_payload' in self._server_info:
            self._max_payload = self._server_info["max_payload"]

        if 'tls_required' in self._server_info and self._server_info['tls_required']:
            ssl_context = None
            if "tls" in self.options:
                ssl_context = self.options.get('tls')
            elif self._current_server.uri.scheme == 'tls':
                ssl_context = ssl.create_default_context()
            else:
                raise NatsError('nats: no ssl context provided')

            transport = self._io_writer.transport
            sock = transport.get_extra_info('socket')
            if not sock:
                # This shouldn't happen
                raise NatsError('nats: unable to get socket')

            yield from self._io_writer.drain()  # just in case something is left

            self._io_reader, self._io_writer = \
                yield from asyncio.open_connection(
                    loop=self._loop,
                    limit=DEFAULT_BUFFER_SIZE,
                    sock=sock,
                    ssl=ssl_context,
                    server_hostname=self._current_server.uri.hostname,
                )

q270192 · Client._flusher · partition: test · language: python

    def _flusher(self):
        """
        Coroutine which continuously tries to consume pending commands
        and then flushes them to the socket.
        """
        while True:
            if not self.is_connected or self.is_connecting:
                break

            try:
                yield from self._flush_queue.get()

                if self._pending_data_size > 0:
                    self._io_writer.writelines(self._pending[:])
                    self._pending = []

q270193 · Client._read_loop · partition: test · language: python

    def _read_loop(self):
        """
        Coroutine which gathers bytes sent by the server
        and feeds them to the protocol parser.
        In case of error while reading, it will stop running
        and its task has to be rescheduled.
        """
        while True:
            try:
                should_bail = self.is_closed or self.is_reconnecting
                if should_bail or self._io_reader is None:
                    break
                if

q270194 · coactivation · partition: test · language: python

    def coactivation(dataset, seed, threshold=0.0, output_dir='.', prefix='', r=6):
        """ Compute and save coactivation map given input image as seed.

        This is essentially just a wrapper for a meta-analysis defined by the
        contrast between those studies that activate within the seed and those
        that don't.

        Args:
            dataset: a Dataset instance containing study and activation data.
            seed: either a Nifti or Analyze image defining the boundaries of
                the seed, or a list of triples (x/y/z) defining the seed(s).
                Note that voxels do not need to be contiguous to define a
                seed--all supra-threshold voxels will be lumped together.
            threshold: optional float indicating the threshold above which
                voxels are considered to be part of the seed ROI (default = 0)
            r: optional integer indicating radius (in mm) of spheres to grow
                (only used if seed is a list of

q270195 · Decoder.decode · partition: test · language: python

    def decode(self, images, save=None, round=4, names=None, **kwargs):
        """ Decodes a set of images.

        Args:
            images: The images to decode. Can be:
                - A single String specifying the filename of the image to decode
                - A list of filenames
                - A single NumPy array containing the image data
            save: Optional filename to save results to. If None (default),
                returns all results as an array.
            round: Optional integer indicating number of decimals to round
                result to. Defaults to 4.
            names: Optional list of names corresponding to the images in
                filenames. If passed, must be of same length and in same order
                as filenames. By default, the columns in the output will be
                named using the image filenames.

        Returns:
            An n_features x n_files numpy array, where each feature is a row
            and each image is a column. The meaning of the values depends on
            the decoding method used.
        """
        if isinstance(images, string_types):
            images = [images]

        if isinstance(images, list):
            imgs_to_decode = imageutils.load_imgs(images, self.masker)
        else:

q270196 · Decoder._load_features_from_array · partition: test · language: python

    def _load_features_from_array(self, features):
        """ Load feature data from a 2D ndarray on disk. """

q270197 · Decoder._load_features_from_images · partition: test · language: python

    def _load_features_from_images(self, images, names=None):
        """ Load feature image data from image files.

        Args:
            images: A list of image filenames.
            names: An optional list of strings to use as the feature names.
                Must be in the same order as the images.
        """
        if names is not None and len(names) != len(images):
            raise Exception(

q270198 · Decoder._pearson_correlation · partition: test · language: python

    def _pearson_correlation(self, imgs_to_decode):
        """ Decode images using Pearson's r.

        Computes the correlation between each input image and each feature
        image across voxels.

        Args:
            imgs_to_decode: An ndarray of images to decode, with voxels in

q270199 · Decoder._dot_product · partition: test · language: python

    def _dot_product(self, imgs_to_decode):
        """ Decoding using the dot product. """
