_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q270100
ExecuteHomeAssistant.get_url
test
def get_url(self):
    """Home Assistant event URL.

    Appends the configured event endpoint to the base url built by the
    parent class.

    :return: url
    :rtype: str
    :raises InvalidConfig: if the 'event' option is missing or empty
    """
    url = super(ExecuteHomeAssistant, self).get_url()
    if not self.data.get('event'):
        # Fail early with a clear config error (fixed "HomeAsistant" typo
        # in the user-facing message).
        raise InvalidConfig(extra_body='Event option is required for HomeAssistant on {} device.'.format(self.name))
    url += '/api/events/{}'.format(self.data['event'])
    return url
python
{ "resource": "" }
q270101
ExecuteIFTTT.get_url
test
def get_url(self):
    """IFTTT Webhook url

    :return: url
    :rtype: str
    :raises InvalidConfig: if the key or the event name is missing
    """
    # .get() instead of a raw subscript: a missing key should surface as
    # the intended InvalidConfig, not an opaque KeyError (consistent with
    # the 'event' check below).
    if not self.data.get(self.execute_name):
        raise InvalidConfig(extra_body='Value for IFTTT is required on {} device. Get your key here: '
                                       'https://ifttt.com/services/maker_webhooks/settings'.format(self.name))
    if not self.data.get('event'):
        raise InvalidConfig(extra_body='Event option is required for IFTTT on {} device. '
                                       'You define the event name when creating a Webhook '
                                       'applet'.format(self.name))
    url = self.url_pattern.format(event=self.data['event'], key=self.data[self.execute_name])
    return url
python
{ "resource": "" }
q270102
pkt_text
test
def pkt_text(pkt):
    """Describe a Scapy packet by its source MAC address.

    :param scapy.packet.Packet pkt: Scapy Packet
    :return: Mac address. Include (Amazon Device) for these devices
    :rtype: str
    """
    src = pkt.src.upper()
    if src in BANNED_DEVICES:
        # Banned devices produce no output at all.
        return ''
    if src[:8] in AMAZON_DEVICES:
        # The first 8 chars (OUI prefix) identify Amazon hardware.
        return '{} (Amazon Device)'.format(pkt.src)
    return pkt.src
python
{ "resource": "" }
q270103
discovery_print
test
def discovery_print(pkt):
    """Scandevice callback. Register src mac to avoid src repetition.
    Print device on screen.

    :param scapy.packet.Packet pkt: Scapy Packet
    :return: None
    """
    if pkt.src in mac_id_list:
        # Already reported this MAC during this scan.
        return
    mac_id_list.append(pkt.src)
    text = pkt_text(pkt)
    # Highlight Amazon devices in magenta; everything else plain.
    if 'Amazon' in text:
        click.secho(text, fg='magenta')
    else:
        click.echo(text)
python
{ "resource": "" }
q270104
discover
test
def discover(interface=None):
    """Print help and scan devices on screen.

    :param interface: network interface to sniff on; None lets
        scan_devices choose its default
    :return: None
    """
    click.secho(HELP, fg='yellow')
    # lfilter drops packets whose source MAC was already seen, so each
    # device is handed to discovery_print at most once per scan.
    scan_devices(discovery_print, lfilter=lambda d: d.src not in mac_id_list, iface=interface)
python
{ "resource": "" }
q270105
Device.execute
test
def execute(self, root_allowed=False):
    """Execute this device

    :param bool root_allowed: Only used for ExecuteCmd
    :return: result message on success, None when no execution method
        is configured
    :raises Exception: re-raises whatever the execution method raised,
        after sending an error confirmation
    """
    logger.debug('%s device executed (mac %s)', self.name, self.src)
    if not self.execute_instance:
        msg = '%s: There is not execution method in device conf.'
        logger.warning(msg, self.name)
        self.send_confirmation(msg % self.name, False)
        return
    try:
        result = self.execute_instance.execute(root_allowed)
    except Exception as e:
        # Report the failure through the confirmation channel, then
        # re-raise so the caller still sees the original exception.
        self.send_confirmation('Error executing the device {}: {}'.format(self.name, e), False)
        raise
    else:
        # None -> "started and running" message (long-running process);
        # any other falsy result (e.g. '') -> generic success message;
        # a truthy result is passed through unchanged.
        result = 'The {} device has been started and is running right now'.format(self.name) \
            if result is None else result
        result = result or 'The {} device has been executed successfully'.format(self.name)
        self.send_confirmation(result)
        return result
python
{ "resource": "" }
q270106
Device.send_confirmation
test
def send_confirmation(self, message, success=True):
    """Send success or error message to configured confirmation

    :param str message: Body message to send
    :param bool success: Device executed successfully to personalize message
    :return: None
    """
    body = message.strip()
    if not self.confirmation:
        # No confirmation channel configured; nothing to do.
        return
    try:
        self.confirmation.send(body, success)
    except Exception as e:
        # Confirmation failures must never break device execution.
        logger.warning('Error sending confirmation on device {}: {}'.format(self.name, e))
python
{ "resource": "" }
q270107
Listener.on_push
test
def on_push(self, device):
    """Press button. Check DEFAULT_DELAY.

    :param scapy.packet.Packet device: Scapy packet
    :return: None
    """
    src = device.src.lower()
    # Debounce: ignore a press arriving within the configured delay of
    # the previous press for the same MAC.
    # NOTE(review): assumes last_execution yields a value for an unseen
    # src (e.g. a defaultdict) -- confirm at its definition site.
    if last_execution[src] + self.settings.get('delay', DEFAULT_DELAY) > time.time():
        return
    last_execution[src] = time.time()
    self.execute(device)
python
{ "resource": "" }
q270108
Listener.execute
test
def execute(self, device):
    """Execute a device. Used if the time between executions is greater
    than DEFAULT_DELAY

    :param scapy.packet.Packet device: Scapy packet
    :return: None
    """
    key = device.src.lower()
    target = self.devices[key]
    # Run in a background thread so the sniffing loop is not blocked by
    # a slow device action.
    worker = threading.Thread(target=target.execute,
                              kwargs={'root_allowed': self.root_allowed})
    worker.start()
python
{ "resource": "" }
q270109
Listener.run
test
def run(self, root_allowed=False):
    """Start daemon mode

    :param bool root_allowed: Only used for ExecuteCmd
    :return: loop
    """
    self.root_allowed = root_allowed
    # Only packets whose source MAC matches a configured device are
    # passed through to on_push.
    scan_devices(self.on_push, lambda d: d.src.lower() in self.devices, self.settings.get('interface'))
python
{ "resource": "" }
q270110
OfxConverter.convert
test
def convert(self, txn):
    """Convert an OFX transaction to a ledger Transaction.

    Plain banking transactions (OfxTransaction) become a two-posting
    transfer between this account and a dynamically guessed account.
    Investment transactions (InvestmentTransaction) are classified by
    their type field (string in recent ofxparse, int in old versions)
    to pick the counter account and posting shapes.

    :param txn: OfxTransaction or InvestmentTransaction to convert
    :return: a Transaction, or None for unrecognized txn classes
    """
    ofxid = self.mk_ofxid(txn.id)
    metadata = {}
    posting_metadata = {"ofxid": ofxid}
    if isinstance(txn, OfxTransaction):
        posting = Posting(self.name, Amount(txn.amount, self.currency),
                          metadata=posting_metadata)
        return Transaction(
            date=txn.date,
            payee=self.format_payee(txn),
            postings=[
                posting,
                posting.clone_inverted(
                    self.mk_dynamic_account(self.format_payee(txn),
                                            exclude=self.name))])
    elif isinstance(txn, InvestmentTransaction):
        acct1 = self.name
        acct2 = self.name
        # posting1/posting2 stay None unless the income/DIV branch below
        # builds them; the fallthrough block after classification builds
        # the security-style postings for every other type.
        posting1 = None
        posting2 = None
        security = self.maybe_get_ticker(txn.security)
        if isinstance(txn.type, str):
            # recent versions of ofxparse
            if re.match('^(buy|sell)', txn.type):
                acct2 = self.unknownaccount or 'Assets:Unknown'
            elif txn.type == 'transfer':
                acct2 = 'Transfer'
            elif txn.type == 'reinvest':
                # reinvestment of income
                # TODO: make this configurable
                acct2 = 'Income:Interest'
            elif txn.type == 'income' and txn.income_type == 'DIV':
                # Fidelity lists non-reinvested dividend income as
                # type: income, income_type: DIV
                # TODO: determine how dividend income is listed from
                # other institutions
                # income/DIV transactions do not involve buying or
                # selling a security so their postings need special
                # handling compared to others
                metadata['dividend_from'] = security
                acct2 = 'Income:Dividends'
                posting1 = Posting(acct1,
                                   Amount(txn.total, self.currency),
                                   metadata=posting_metadata)
                posting2 = posting1.clone_inverted(acct2)
            else:
                # ???
                pass
        else:
            # Old version of ofxparse used numeric type codes.
            if (txn.type in [0, 1, 3, 4]):
                # buymf, sellmf, buystock, sellstock
                acct2 = self.unknownaccount or 'Assets:Unknown'
            elif (txn.type == 2):
                # reinvest
                acct2 = 'Income:Interest'
            else:
                # ???
                pass
        aux_date = None
        if txn.settleDate is not None and \
           txn.settleDate != txn.tradeDate:
            aux_date = txn.settleDate
        # income/DIV already defined above;
        # this block defines all other posting types
        if posting1 is None and posting2 is None:
            posting1 = Posting(
                acct1,
                Amount(
                    txn.units,
                    security,
                    unlimited=True),
                unit_price=Amount(
                    txn.unit_price,
                    self.currency,
                    unlimited=True),
                metadata=posting_metadata)
            posting2 = Posting(
                acct2,
                Amount(
                    txn.units * txn.unit_price,
                    self.currency,
                    reverse=True))
        else:
            # Previously defined if type:income income_type/DIV
            pass
        return Transaction(
            date=txn.tradeDate,
            aux_date=aux_date,
            payee=self.format_payee(txn),
            metadata=metadata,
            postings=[posting1, posting2]
        )
python
{ "resource": "" }
q270111
find_ledger_file
test
def find_ledger_file(ledgerrcpath=None):
    """Return the main ledger file path, or None if it cannot be found.

    Resolution order: the LEDGER_FILE environment variable first, then
    the first ``--file`` option found in the ledgerrc file.

    :param ledgerrcpath: alternative path to the ledgerrc file;
        defaults to ~/.ledgerrc
    :return: absolute path to the ledger file, or None
    """
    if ledgerrcpath is None:
        ledgerrcpath = os.path.abspath(os.path.expanduser("~/.ledgerrc"))
    if "LEDGER_FILE" in os.environ:
        return os.path.abspath(os.path.expanduser(os.environ["LEDGER_FILE"]))
    elif os.path.exists(ledgerrcpath):
        # hacky: scrape the --file option out of the rc file. The
        # context manager closes the handle (the original leaked it).
        with open(ledgerrcpath) as ledgerrc:
            for line in ledgerrc:
                md = re.match(r"--file\s+([^\s]+).*", line)
                if md is not None:
                    return os.path.abspath(os.path.expanduser(md.group(1)))
    else:
        return None
python
{ "resource": "" }
q270112
compatibility
test
def compatibility(session, install):
    """Run the unit test suite with each support library and Python version.

    :param session: nox session object used to install and run commands
    :param install: pip requirement spec of the support-library version
        under test (e.g. ``'somelib==1.2'``)
    """
    # Editable install of the package itself with its dev extras, then
    # the specific library version being tested for compatibility.
    session.install('-e', '.[dev]')
    session.install(install)
    _run_tests(session)
python
{ "resource": "" }
q270113
get_long_description
test
def get_long_description():
    """Transform README.md into a usable long description.

    Replaces relative references to svg images to absolute https
    references.
    """
    with open('README.md') as readme_file:
        contents = readme_file.read()

    def absolutize(match):
        # Strip the surrounding parentheses from the matched markdown link.
        relative_path = match.group(0)[1:-1]
        return ('(https://github.com/google/pybadges/raw/master/'
                '%s?sanitize=true)' % relative_path)

    return re.sub(r'\(tests/golden-images/.*?\.svg\)',
                  absolutize,
                  contents)
python
{ "resource": "" }
q270114
PrecalculatedTextMeasurer.from_json
test
def from_json(f: TextIO) -> 'PrecalculatedTextMeasurer':
    """Build a PrecalculatedTextMeasurer from a JSON stream.

    See precalculate_text.py for details on the required format.
    """
    data = json.load(f)
    return PrecalculatedTextMeasurer(data['mean-character-length'],
                                     data['character-lengths'],
                                     data['kerning-pairs'])
python
{ "resource": "" }
q270115
PrecalculatedTextMeasurer.default
test
def default(cls) -> 'PrecalculatedTextMeasurer':
    """Returns a reasonable default PrecalculatedTextMeasurer.

    The result is cached on the class so the width table is loaded and
    parsed at most once per process.
    """
    if cls._default_cache is not None:
        return cls._default_cache
    if pkg_resources.resource_exists(__name__, 'default-widths.json.xz'):
        # Prefer the xz-compressed width table when packaged; lzma is
        # imported lazily since it is only needed on this path.
        import lzma
        with pkg_resources.resource_stream(__name__,
                                           'default-widths.json.xz') as f:
            with lzma.open(f, "rt") as g:
                cls._default_cache = PrecalculatedTextMeasurer.from_json(
                    cast(TextIO, g))
                return cls._default_cache
    elif pkg_resources.resource_exists(__name__, 'default-widths.json'):
        with pkg_resources.resource_stream(__name__,
                                           'default-widths.json') as f:
            cls._default_cache = PrecalculatedTextMeasurer.from_json(
                io.TextIOWrapper(f, encoding='utf-8'))
            return cls._default_cache
    else:
        raise ValueError('could not load default-widths.json')
python
{ "resource": "" }
q270116
badge
test
def badge(left_text: str,
          right_text: str,
          left_link: Optional[str] = None,
          right_link: Optional[str] = None,
          whole_link: Optional[str] = None,
          logo: Optional[str] = None,
          left_color: str = '#555',
          right_color: str = '#007ec6',
          measurer: Optional[text_measurer.TextMeasurer] = None,
          embed_logo: bool = False) -> str:
    """Creates a github-style badge as an SVG image.

    >>> badge(left_text='coverage', right_text='23%', right_color='red')
    '<svg...</svg>'
    >>> badge(left_text='build', right_text='green', right_color='green',
    ...       whole_link="http://www.example.com/")
    '<svg...</svg>'

    Args:
        left_text: The text that should appear on the left-hand-side of the
            badge e.g. "coverage".
        right_text: The text that should appear on the right-hand-side of the
            badge e.g. "23%".
        left_link: The URL that should be redirected to when the left-hand
            text is selected.
        right_link: The URL that should be redirected to when the right-hand
            text is selected.
        whole_link: The link that should be redirected to when the badge is
            selected. If set then left_link and right_link may not be set.
        logo: A url representing a logo that will be displayed inside the
            badge. Can be a data URL e.g. "data:image/svg+xml;utf8,<svg..."
        left_color: The color of the part of the badge containing the
            left-hand text. Can be a valid CSS color (see
            https://developer.mozilla.org/en-US/docs/Web/CSS/color) or a color
            name defined here:
            https://github.com/badges/shields/blob/master/lib/colorscheme.json
        right_color: The color of the part of the badge containing the
            right-hand text. Can be a valid CSS color (see
            https://developer.mozilla.org/en-US/docs/Web/CSS/color) or a color
            name defined here:
            https://github.com/badges/shields/blob/master/lib/colorscheme.json
        measurer: A text_measurer.TextMeasurer that can be used to measure the
            width of left_text and right_text.
        embed_logo: If True then embed the logo image directly in the badge.
            This can prevent an HTTP request and some browsers will not render
            external image referenced. When True, `logo` must be a HTTP/HTTPS
            URI or a filesystem path. Also, the `badge` call may raise an
            exception if the logo cannot be loaded, is not an image, etc.
    """
    if measurer is None:
        measurer = (
            precalculated_text_measurer.PrecalculatedTextMeasurer
            .default())

    if (left_link or right_link) and whole_link:
        # Fixed "bet set" typo in the user-facing error message.
        raise ValueError(
            'whole_link may not be set with left_link or right_link')
    template = _JINJA2_ENVIRONMENT.get_template('badge-template-full.svg')

    if logo and embed_logo:
        logo = _embed_image(logo)

    svg = template.render(
        left_text=left_text,
        right_text=right_text,
        # Template widths are in tenths of a pixel.
        left_text_width=measurer.text_width(left_text) / 10.0,
        right_text_width=measurer.text_width(right_text) / 10.0,
        left_link=left_link,
        right_link=right_link,
        whole_link=whole_link,
        logo=logo,
        # Allow shields-style color names to stand in for CSS colors.
        left_color=_NAME_TO_COLOR.get(left_color, left_color),
        right_color=_NAME_TO_COLOR.get(right_color, right_color),
    )

    # Re-serialize via minidom to strip insignificant whitespace.
    xml = minidom.parseString(svg)
    _remove_blanks(xml)
    xml.normalize()
    return xml.documentElement.toxml()
python
{ "resource": "" }
q270117
generate_supported_characters
test
def generate_supported_characters(deja_vu_sans_path: str) -> Iterable[str]:
    """Yield every character supported by the font at the given path."""
    font = ttLib.TTFont(deja_vu_sans_path)
    # Only unicode cmap subtables map code points we can turn into str.
    for table in font['cmap'].tables:
        if not table.isUnicode():
            continue
        for code_point in table.cmap:
            yield chr(code_point)
python
{ "resource": "" }
q270118
generate_encodeable_characters
test
def generate_encodeable_characters(characters: Iterable[str],
                                   encodings: Iterable[str]) -> Iterable[str]:
    """Generates the subset of 'characters' that can be encoded by 'encodings'.

    Args:
        characters: The characters to check for encodeability e.g. 'abcd'.
        encodings: The encodings to check against e.g. ['cp1252', 'iso-8859-5'].

    Returns:
        The subset of 'characters' that can be encoded using one of the
        provided encodings. Each character is yielded at most once.
    """
    for c in characters:
        for encoding in encodings:
            try:
                c.encode(encoding)
            except UnicodeEncodeError:
                continue
            else:
                # Stop after the first encoding that works: the original
                # yielded a duplicate for every matching encoding.
                yield c
                break
python
{ "resource": "" }
q270119
calculate_character_to_length_mapping
test
def calculate_character_to_length_mapping(
        measurer: text_measurer.TextMeasurer,
        characters: Iterable[str]) -> Mapping[str, float]:
    """Return a mapping between each given character and its length.

    Args:
        measurer: The TextMeasurer used to measure the width of the text
            in pixels.
        characters: The characters to measure e.g. "ml".

    Returns:
        A mapping from the given characters to their length in pixels, as
        determined by 'measurer' e.g. {'m': 5.2, 'l': 1.2}.
    """
    # Dict comprehension replaces the manual build-up loop.
    return {c: measurer.text_width(c) for c in characters}
python
{ "resource": "" }
q270120
write_json
test
def write_json(f: TextIO, deja_vu_sans_path: str,
               measurer: text_measurer.TextMeasurer,
               encodings: Iterable[str]) -> None:
    """Write the data required by PrecalculatedTextMeasurer to a stream.

    Measures every character the font supports, plus kerning data for
    the subset encodeable by the given encodings, and dumps the result
    as JSON.
    """
    supported_characters = list(
        generate_supported_characters(deja_vu_sans_path))
    # Kerning pairs are only computed over characters that survive the
    # encoding filter, keeping the pair table tractable.
    kerning_characters = ''.join(
        generate_encodeable_characters(supported_characters, encodings))
    char_to_length = calculate_character_to_length_mapping(
        measurer, supported_characters)
    pair_to_kerning = calculate_pair_to_kern_mapping(
        measurer, char_to_length, kerning_characters)
    # NOTE(review): 'kerning-characters' is written but from_json only
    # reads the other three keys -- presumably kept for debugging; verify.
    json.dump(
        {'mean-character-length': statistics.mean(char_to_length.values()),
         'character-lengths': char_to_length,
         'kerning-characters': kerning_characters,
         'kerning-pairs': pair_to_kerning},
        f,
        sort_keys=True,
        indent=1)
python
{ "resource": "" }
q270121
convolve_gaussian_2d
test
def convolve_gaussian_2d(image, gaussian_kernel_1d):
    """Separable 2-D gaussian convolution.

    Correlating along each axis with the 1-D kernel is equivalent to
    (and cheaper than) a full 2-D convolution with the kernel's outer
    product, because a gaussian kernel is symmetric and separable.

    :param image: 2-D array-like image data.
    :param gaussian_kernel_1d: 1-D gaussian kernel.
    :return: filtered 2-D numpy array.
    """
    # scipy.ndimage.filters is a deprecated alias (removed in recent
    # SciPy); the functions live directly in scipy.ndimage.
    result = scipy.ndimage.correlate1d(
        image, gaussian_kernel_1d, axis=0)
    result = scipy.ndimage.correlate1d(
        result, gaussian_kernel_1d, axis=1)
    return result
python
{ "resource": "" }
q270122
get_gaussian_kernel
test
def get_gaussian_kernel(gaussian_kernel_width=11, gaussian_kernel_sigma=1.5):
    """Generate a normalized 1-D gaussian kernel.

    :param gaussian_kernel_width: number of taps in the kernel.
    :param gaussian_kernel_sigma: standard deviation of the gaussian.
    :return: numpy array of length ``gaussian_kernel_width`` whose
        entries sum to 1.
    """
    # Vectorized: evaluate the unnormalized gaussian at every tap at
    # once instead of filling an uninitialized ndarray in a Python loop.
    norm_mu = int(gaussian_kernel_width / 2)
    offsets = numpy.arange(gaussian_kernel_width) - norm_mu
    gaussian_kernel_1d = numpy.exp(
        -(offsets ** 2) / (2.0 * (gaussian_kernel_sigma ** 2)))
    return gaussian_kernel_1d / numpy.sum(gaussian_kernel_1d)
python
{ "resource": "" }
q270123
to_grayscale
test
def to_grayscale(img):
    """Convert PIL image to numpy grayscale array and numpy alpha array.

    Args:
        img (PIL.Image): PIL Image object.

    Returns:
        (gray, alpha): both numpy arrays; alpha is None when the image
        has no alpha band.
    """
    # numpy.float was removed in NumPy 1.24; the builtin float is the
    # documented replacement (alias for float64 here).
    gray = numpy.asarray(ImageOps.grayscale(img)).astype(float)

    imbands = img.getbands()
    alpha = None
    if 'A' in imbands:
        # The alpha channel is always the last band when present.
        alpha = numpy.asarray(img.split()[-1]).astype(float)

    return gray, alpha
python
{ "resource": "" }
q270124
main
test
def main():
    """Main function for pyssim.

    Parses the command line, then prints the SSIM (or CW-SSIM with
    --cw) between the base image and every image matched by the
    comparison glob.
    """
    description = '\n'.join([
        'Compares an image with a list of images using the SSIM metric.',
        ' Example:',
        ' pyssim test-images/test1-1.png "test-images/*"'
    ])

    parser = argparse.ArgumentParser(
        prog='pyssim',
        formatter_class=argparse.RawTextHelpFormatter,
        description=description)
    parser.add_argument('--cw',
                        help='compute the complex wavelet SSIM',
                        action='store_true')
    parser.add_argument(
        'base_image', metavar='image1.png', type=argparse.FileType('r'))
    parser.add_argument(
        'comparison_images', metavar='image path with* or image2.png')
    parser.add_argument('--width', type=int, default=None,
                        help='scales the image before computing SSIM')
    parser.add_argument('--height', type=int, default=None,
                        help='scales the image before computing SSIM')
    args = parser.parse_args()

    # Both dimensions are required for scaling; otherwise keep original size.
    if args.width and args.height:
        size = (args.width, args.height)
    else:
        size = None

    if not args.cw:
        # Classic SSIM needs a gaussian window; CW-SSIM does not.
        gaussian_kernel_sigma = 1.5
        gaussian_kernel_width = 11
        gaussian_kernel_1d = get_gaussian_kernel(
            gaussian_kernel_width, gaussian_kernel_sigma)

    comparison_images = glob.glob(args.comparison_images)
    is_a_single_image = len(comparison_images) == 1

    for comparison_image in comparison_images:
        if args.cw:
            ssim = SSIM(args.base_image.name, size=size)
            ssim_value = ssim.cw_ssim_value(comparison_image)
        else:
            ssim = SSIM(args.base_image.name, gaussian_kernel_1d, size=size)
            ssim_value = ssim.ssim_value(comparison_image)

        # Single image: bare value (script-friendly); multiple: labelled.
        if is_a_single_image:
            sys.stdout.write('%.7g' % ssim_value)
        else:
            sys.stdout.write('%s - %s: %.7g' % (
                args.base_image.name, comparison_image, ssim_value))

        sys.stdout.write('\n')
python
{ "resource": "" }
q270125
SSIM.ssim_value
test
def ssim_value(self, target): """Compute the SSIM value from the reference image to the target image. Args: target (str or PIL.Image): Input image to compare the reference image to. This may be a PIL Image object or, to save time, an SSIMImage object (e.g. the img member of another SSIM object). Returns: Computed SSIM float value. """ # Performance boost if handed a compatible SSIMImage object. if not isinstance(target, SSIMImage) \ or not np.array_equal(self.gaussian_kernel_1d, target.gaussian_kernel_1d): target = SSIMImage(target, self.gaussian_kernel_1d, self.img.size) img_mat_12 = self.img.img_gray * target.img_gray img_mat_sigma_12 = convolve_gaussian_2d( img_mat_12, self.gaussian_kernel_1d) img_mat_mu_12 = self.img.img_gray_mu * target.img_gray_mu img_mat_sigma_12 = img_mat_sigma_12 - img_mat_mu_12 # Numerator of SSIM num_ssim = ((2 * img_mat_mu_12 + self.c_1) * (2 * img_mat_sigma_12 + self.c_2)) # Denominator of SSIM den_ssim = ( (self.img.img_gray_mu_squared + target.img_gray_mu_squared + self.c_1) * (self.img.img_gray_sigma_squared + target.img_gray_sigma_squared + self.c_2)) ssim_map = num_ssim / den_ssim index = np.average(ssim_map) return index
python
{ "resource": "" }
q270126
compute_ssim
test
def compute_ssim(image1, image2, gaussian_kernel_sigma=1.5,
                 gaussian_kernel_width=11):
    """Computes SSIM between two images.

    Args:
        image1: First PIL Image object to compare.
        image2: Second PIL Image object to compare.
        gaussian_kernel_sigma: Standard deviation of the gaussian window
            (default 1.5).
        gaussian_kernel_width: Number of taps in the gaussian window
            (default 11).

    Returns:
        SSIM float value.
    """
    gaussian_kernel_1d = get_gaussian_kernel(
        gaussian_kernel_width, gaussian_kernel_sigma)
    return SSIM(image1, gaussian_kernel_1d).ssim_value(image2)
python
{ "resource": "" }
q270127
SyncObj.destroy
test
def destroy(self):
    """
    Correctly destroy SyncObj. Stop autoTickThread, close connections, etc.
    """
    if self.__conf.autoTick:
        # The auto-tick thread observes this flag and performs the
        # actual teardown from its own loop, avoiding a race with an
        # in-flight tick.
        self.__destroying = True
    else:
        # Manual ticking: the caller owns the loop, destroy immediately.
        self._doDestroy()
python
{ "resource": "" }
q270128
SyncObj.setCodeVersion
test
def setCodeVersion(self, newVersion, callback = None):
    """Switch to a new code version on all cluster nodes. You
    should ensure that cluster nodes are updated, otherwise they
    won't be able to apply commands.

    :param newVersion: new code version
    :type newVersion: int
    :param callback: will be called on success or fail
    :type callback: function(`FAIL_REASON <#pysyncobj.FAIL_REASON>`_, None)
    """
    assert isinstance(newVersion, int)

    # A node cannot enable a version newer than the code it runs, nor
    # roll back below the version already enabled cluster-wide.
    if newVersion > self.__selfCodeVersion:
        raise Exception('wrong version, current version is %d, requested version is %d' % (self.__selfCodeVersion, newVersion))
    if newVersion < self.__enabledCodeVersion:
        raise Exception('wrong version, enabled version is %d, requested version is %d' % (self.__enabledCodeVersion, newVersion))

    # Replicated through the raft log so every node switches in order.
    self._applyCommand(pickle.dumps(newVersion), callback, _COMMAND_TYPE.VERSION)
python
{ "resource": "" }
q270129
SyncObj.getStatus
test
def getStatus(self):
    """Dumps different debug info about cluster to dict and return it"""
    status = {}
    status['version'] = VERSION
    status['revision'] = REVISION
    status['self'] = self.__selfNode
    status['state'] = self.__raftState
    status['leader'] = self.__raftLeader
    # 2 = connected, 0 = disconnected (numeric for easy monitoring).
    status['partner_nodes_count'] = len(self.__otherNodes)
    for node in self.__otherNodes:
        status['partner_node_status_server_' + node.id] = 2 if node in self.__connectedNodes else 0
    status['readonly_nodes_count'] = len(self.__readonlyNodes)
    for node in self.__readonlyNodes:
        status['readonly_node_status_server_' + node.id] = 2 if node in self.__connectedNodes else 0
    status['log_len'] = len(self.__raftLog)
    status['last_applied'] = self.__raftLastApplied
    status['commit_idx'] = self.__raftCommitIndex
    status['raft_term'] = self.__raftCurrentTerm
    # Per-follower replication progress (leader bookkeeping).
    status['next_node_idx_count'] = len(self.__raftNextIndex)
    for node, idx in iteritems(self.__raftNextIndex):
        status['next_node_idx_server_' + node.id] = idx
    status['match_idx_count'] = len(self.__raftMatchIndex)
    for node, idx in iteritems(self.__raftMatchIndex):
        status['match_idx_server_' + node.id] = idx
    status['leader_commit_idx'] = self.__leaderCommitIndex
    status['uptime'] = int(time.time() - self.__startTime)
    status['self_code_version'] = self.__selfCodeVersion
    status['enabled_code_version'] = self.__enabledCodeVersion
    return status
python
{ "resource": "" }
q270130
SyncObj.printStatus
test
def printStatus(self):
    """Dump the debug info from getStatus() to the default logger."""
    status = self.getStatus()
    for key, value in iteritems(status):
        # Lazy %-style arguments let logging skip the formatting work
        # entirely when INFO is disabled (the original formatted eagerly).
        logging.info('%s: %s', key, value)
python
{ "resource": "" }
q270131
TCPTransport._connToNode
test
def _connToNode(self, conn): """ Find the node to which a connection belongs. :param conn: connection object :type conn: TcpConnection :returns corresponding node or None if the node cannot be found :rtype Node or None """ for node in self._connections: if self._connections[node] is conn: return node return None
python
{ "resource": "" }
q270132
TCPTransport._maybeBind
test
def _maybeBind(self):
    """
    Bind the server unless it is already bound, this is a read-only
    node, or the last attempt was too recently.

    :raises TransportNotReadyError if the bind attempt fails
    """
    # Rate-limit bind retries via bindRetryTime; read-only nodes never
    # run a listening server.
    if self._ready or self._selfIsReadonlyNode or time.time() < self._lastBindAttemptTime + self._syncObj.conf.bindRetryTime:
        return
    self._lastBindAttemptTime = time.time()
    try:
        self._server.bind()
    except Exception as e:
        self._bindAttempts += 1
        if self._syncObj.conf.maxBindRetries and self._bindAttempts >= self._syncObj.conf.maxBindRetries:
            # Out of retries: wake anything waiting on the bind, then
            # surface the failure.
            self._bindOverEvent.set()
            raise TransportNotReadyError
    else:
        self._ready = True
        self._bindOverEvent.set()
python
{ "resource": "" }
q270133
TCPTransport._onNewIncomingConnection
test
def _onNewIncomingConnection(self, conn):
    """
    Callback for connections initiated by the other side

    :param conn: connection object
    :type conn: TcpConnection
    """
    # Track the connection as unknown until it identifies itself in
    # _onIncomingMessageReceived.
    self._unknownConnections.add(conn)
    encryptor = self._syncObj.encryptor
    if encryptor:
        conn.encryptor = encryptor
    conn.setOnMessageReceivedCallback(
        functools.partial(self._onIncomingMessageReceived, conn))
    conn.setOnDisconnectedCallback(
        functools.partial(self._onDisconnected, conn))
python
{ "resource": "" }
q270134
TCPTransport._onIncomingMessageReceived
test
def _onIncomingMessageReceived(self, conn, message):
    """
    Callback for initial messages on incoming connections. Handles
    encryption, utility messages, and association of the connection with
    a Node.

    Once this initial setup is done, the relevant connected callback is
    executed, and further messages are deferred to the onMessageReceived
    callback.

    :param conn: connection object
    :type conn: TcpConnection
    :param message: received message
    :type message: any
    """
    # Encryption handshake: the first message is the peer's random key;
    # reply with ours and wait for the next message.
    if self._syncObj.encryptor and not conn.sendRandKey:
        conn.sendRandKey = message
        conn.recvRandKey = os.urandom(32)
        conn.send(conn.recvRandKey)
        return

    # Utility messages
    if isinstance(message, list):
        done = False
        try:
            if message[0] == 'status':
                conn.send(self._syncObj.getStatus())
                done = True
            elif message[0] == 'add':
                self._syncObj.addNodeToCluster(message[1], callback = functools.partial(self._utilityCallback, conn = conn, cmd = 'ADD', arg = message[1]))
                done = True
            elif message[0] == 'remove':
                # Refuse to remove ourselves over the utility channel.
                if message[1] == self._selfNode.address:
                    conn.send('FAIL REMOVE ' + message[1])
                else:
                    self._syncObj.removeNodeFromCluster(message[1], callback = functools.partial(self._utilityCallback, conn = conn, cmd = 'REMOVE', arg = message[1]))
                done = True
            elif message[0] == 'set_version':
                self._syncObj.setCodeVersion(message[1], callback = functools.partial(self._utilityCallback, conn = conn, cmd = 'SET_VERSION', arg = str(message[1])))
                done = True
        except Exception as e:
            # Report the error text back over the utility connection.
            conn.send(str(e))
            done = True
        if done:
            return

    # At this point, message should be either a node ID (i.e. address)
    # or 'readonly'
    node = self._nodeAddrToNode[message] if message in self._nodeAddrToNode else None
    if node is None and message != 'readonly':
        # Unknown peer: drop the connection.
        conn.disconnect()
        self._unknownConnections.discard(conn)
        return

    readonly = node is None
    if readonly:
        # Read-only observers get synthetic, counter-based node ids.
        nodeId = str(self._readonlyNodesCounter)
        node = Node(nodeId)
        self._readonlyNodes.add(node)
        self._readonlyNodesCounter += 1

    self._unknownConnections.discard(conn)
    self._connections[node] = conn
    conn.setOnMessageReceivedCallback(functools.partial(self._onMessageReceived, node))
    if not readonly:
        self._onNodeConnected(node)
    else:
        self._onReadonlyNodeConnected(node)
python
{ "resource": "" }
q270135
TCPTransport._utilityCallback
test
def _utilityCallback(self, res, err, conn, cmd, arg):
    """
    Callback for the utility messages

    :param res: result of the command
    :param err: error code (one of pysyncobj.config.FAIL_REASON)
    :param conn: utility connection
    :param cmd: command
    :param arg: command arguments
    """
    # Reply with e.g. "SUCCESS ADD host:port" or "FAIL ADD host:port".
    outcome = 'SUCCESS' if err == FAIL_REASON.SUCCESS else 'FAIL'
    conn.send(outcome + ' ' + cmd + ' ' + arg)
python
{ "resource": "" }
q270136
TCPTransport._shouldConnect
test
def _shouldConnect(self, node):
    """
    Check whether this node should initiate a connection to another node

    :param node: the other node
    :type node: Node
    """
    if not isinstance(node, TCPNode):
        return False
    if node in self._preventConnectNodes:
        return False
    # Read-only nodes always dial out; otherwise only the side with the
    # greater address initiates, so exactly one connection exists per pair.
    return self._selfIsReadonlyNode or self._selfNode.address > node.address
python
{ "resource": "" }
q270137
TCPTransport._connectIfNecessarySingle
test
def _connectIfNecessarySingle(self, node):
    """
    Connect to a node if necessary.

    :param node: node to connect to
    :type node: Node
    :returns: True if a live connection exists or a connect was started
    :rtype: bool
    """
    if node in self._connections and self._connections[node].state != CONNECTION_STATE.DISCONNECTED:
        return True
    if not self._shouldConnect(node):
        return False
    assert node in self._connections # Since we "should connect" to this node, there should always be a connection object already in place.
    # Rate-limit reconnect attempts per node via connectionRetryTime.
    if node in self._lastConnectAttempt and time.time() - self._lastConnectAttempt[node] < self._syncObj.conf.connectionRetryTime:
        return False
    self._lastConnectAttempt[node] = time.time()
    return self._connections[node].connect(node.ip, node.port)
python
{ "resource": "" }
q270138
TCPTransport._onOutgoingConnected
test
def _onOutgoingConnected(self, conn):
    """
    Callback for when a new connection from this to another node is
    established. Handles encryption and informs the other node which
    node this is.

    If encryption is disabled, this triggers the onNodeConnected
    callback and messages are deferred to the onMessageReceived
    callback. If encryption is enabled, the first message is handled by
    _onOutgoingMessageReceived.

    :param conn: connection object
    :type conn: TcpConnection
    """
    if self._syncObj.encryptor:
        # Start the key exchange: send our random key first, identify
        # ourselves later in _onOutgoingMessageReceived.
        conn.setOnMessageReceivedCallback(functools.partial(self._onOutgoingMessageReceived, conn)) # So we can process the sendRandKey
        conn.recvRandKey = os.urandom(32)
        conn.send(conn.recvRandKey)
    else:
        # The onMessageReceived callback is configured in addNode already.
        if not self._selfIsReadonlyNode:
            conn.send(self._selfNode.address)
        else:
            conn.send('readonly')
        self._onNodeConnected(self._connToNode(conn))
python
{ "resource": "" }
q270139
TCPTransport._onOutgoingMessageReceived
test
def _onOutgoingMessageReceived(self, conn, message):
    """
    Callback for receiving a message on a new outgoing connection. Used
    only if encryption is enabled to exchange the random keys.

    Once the key exchange is done, this triggers the onNodeConnected
    callback, and further messages are deferred to the onMessageReceived
    callback.

    :param conn: connection object
    :type conn: TcpConnection
    :param message: received message
    :type message: any
    """
    if not conn.sendRandKey:
        # First message is the peer's random key; now identify ourselves.
        conn.sendRandKey = message
        conn.send(self._selfNode.address)

    # Handshake complete: route further traffic to the normal handler.
    node = self._connToNode(conn)
    conn.setOnMessageReceivedCallback(functools.partial(self._onMessageReceived, node))
    self._onNodeConnected(node)
python
{ "resource": "" }
q270140
TCPTransport._onDisconnected
test
def _onDisconnected(self, conn):
    """
    Callback for when a connection is terminated or considered dead.
    Initiates a reconnect if necessary.

    :param conn: connection object
    :type conn: TcpConnection
    """
    self._unknownConnections.discard(conn)
    node = self._connToNode(conn)
    if node is not None:
        if node in self._nodes:
            # Full cluster member: notify and try to re-establish.
            self._onNodeDisconnected(node)
            self._connectIfNecessarySingle(node)
        else:
            # Read-only observers are simply forgotten on disconnect.
            self._readonlyNodes.discard(node)
            self._onReadonlyNodeDisconnected(node)
python
{ "resource": "" }
q270141
TCPTransport.addNode
test
def addNode(self, node):
    """
    Add a node to the network

    :param node: node to add
    :type node: TCPNode
    """
    self._nodes.add(node)
    self._nodeAddrToNode[node.address] = node
    # Only the initiating side (per _shouldConnect's tie-break) creates
    # an outgoing connection object; the other side waits for incoming.
    if self._shouldConnect(node):
        conn = TcpConnection(poller = self._syncObj._poller,
                             timeout = self._syncObj.conf.connectionTimeout,
                             sendBufferSize = self._syncObj.conf.sendBufferSize,
                             recvBufferSize = self._syncObj.conf.recvBufferSize)
        conn.encryptor = self._syncObj.encryptor
        conn.setOnConnectedCallback(functools.partial(self._onOutgoingConnected, conn))
        conn.setOnMessageReceivedCallback(functools.partial(self._onMessageReceived, node))
        conn.setOnDisconnectedCallback(functools.partial(self._onDisconnected, conn))
        self._connections[node] = conn
python
{ "resource": "" }
q270142
TCPTransport.dropNode
test
def dropNode(self, node):
    """
    Drop a node from the network

    :param node: node to drop
    :type node: Node
    """
    conn = self._connections.pop(node, None)
    if conn is not None:
        # Calling conn.disconnect() immediately triggers the
        # onDisconnected callback if the connection isn't already
        # disconnected, so this is necessary to prevent the automatic
        # reconnect.
        self._preventConnectNodes.add(node)
        conn.disconnect()
        self._preventConnectNodes.remove(node)
    if isinstance(node, TCPNode):
        self._nodes.discard(node)
        self._nodeAddrToNode.pop(node.address, None)
    else:
        self._readonlyNodes.discard(node)
    self._lastConnectAttempt.pop(node, None)
python
{ "resource": "" }
q270143
TCPTransport.send
test
def send(self, node, message):
    """
    Send a message to a node.

    :param node: target node
    :type node: Node
    :param message: message
    :type message: any
    :returns: False when the connection appears dead either before or after
        the send attempt, True otherwise
    :rtype: bool
    """
    conn = self._connections.get(node)
    if conn is None or conn.state != CONNECTION_STATE.CONNECTED:
        return False
    conn.send(message)
    # The send itself may have detected a dead peer and dropped the state.
    return conn.state == CONNECTION_STATE.CONNECTED
python
{ "resource": "" }
q270144
TCPTransport.destroy
test
def destroy(self):
    """
    Tear down the transport: clear every callback, drop all known nodes,
    unbind the listening server (if any), and disconnect all
    yet-unidentified incoming connections.
    """
    for setter in (
        self.setOnMessageReceivedCallback,
        self.setOnNodeConnectedCallback,
        self.setOnNodeDisconnectedCallback,
        self.setOnReadonlyNodeConnectedCallback,
        self.setOnReadonlyNodeDisconnectedCallback,
    ):
        setter(None)
    for node in self._nodes | self._readonlyNodes:
        self.dropNode(node)
    if self._server is not None:
        self._server.unbind()
    for conn in self._unknownConnections:
        conn.disconnect()
    self._unknownConnections = set()
python
{ "resource": "" }
q270145
ReplQueue.put
test
def put(self, item):
    """Append *item* to the queue.

    Returns True when the item was stored, False when the queue is bounded
    and already full.
    """
    is_full = self.__maxsize and len(self.__data) >= self.__maxsize
    if is_full:
        return False
    self.__data.append(item)
    return True
python
{ "resource": "" }
q270146
ReplPriorityQueue.put
test
def put(self, item):
    """Push *item* onto the heap.

    Items must be mutually comparable (e.g. tuples).  Returns True when
    stored, False when the queue is bounded and already full.
    """
    if self.__maxsize and not (len(self.__data) < self.__maxsize):
        return False
    heapq.heappush(self.__data, item)
    return True
python
{ "resource": "" }
q270147
ReplPriorityQueue.get
test
def get(self, default=None):
    """Pop and return the smallest item, or *default* when the queue is empty."""
    return heapq.heappop(self.__data) if self.__data else default
python
{ "resource": "" }
q270148
ReplLockManager.tryAcquire
test
def tryAcquire(self, lockID, callback=None, sync=False, timeout=None):
    """Attempt to acquire the lock identified by *lockID*.

    :param lockID: unique lock identifier.
    :type lockID: str
    :param callback: called with (opResult, error) when sync is False.
    :type callback: func(opResult, error)
    :param sync: wait until the acquire succeeds or fails when True.
    :type sync: bool
    :param timeout: max operation time (default - unlimited).
    :type timeout: float
    :return: True if acquired, False when somebody else already holds it.
    """
    attempt_time = time.time()
    return self.__lockImpl.acquire(
        lockID, self.__selfID, attempt_time,
        callback=callback, sync=sync, timeout=timeout)
python
{ "resource": "" }
q270149
ReplLockManager.isAcquired
test
def isAcquired(self, lockID):
    """Tell whether *lockID* is currently held by this manager instance.

    :param lockID: unique lock identifier.
    :type lockID: str
    :return: True if we own the lock.
    """
    now = time.time()
    return self.__lockImpl.isAcquired(lockID, self.__selfID, now)
python
{ "resource": "" }
q270150
ReplLockManager.release
test
def release(self, lockID, callback=None, sync=False, timeout=None):
    """Release a previously-acquired lock.

    :param lockID: unique lock identifier.
    :type lockID: str
    :param callback: called with (opResult, error) when sync is False.
    :type callback: func(opResult, error)
    :param sync: wait until the release succeeds or fails when True.
    :type sync: bool
    :param timeout: max operation time (default - unlimited).
    :type timeout: float
    """
    self.__lockImpl.release(
        lockID, self.__selfID,
        callback=callback, sync=sync, timeout=timeout)
python
{ "resource": "" }
q270151
check
test
def check(func):
    """
    Decorator wrapping a health check: runs the check and, on any
    exception, returns a structured error response instead of propagating.
    """
    def wrapped(*args, **kwargs):
        check_name = func.__name__
        arg_name = args[0] if args else None
        try:
            if arg_name:
                logger.debug("Checking '%s' for '%s'", check_name, arg_name)
            else:
                logger.debug("Checking '%s'", check_name)
            response = func(*args, **kwargs)
        except Exception as e:
            message = str(e)
            response = {
                "ok": False,
                "error": message,
                "stacktrace": traceback.format_exc(),
            }
            if arg_name:
                # The check covers several sub-checks (e.g. one per
                # database); key the result by name.
                response = {arg_name: response}
                logger.exception(
                    "Error calling '%s' for '%s': %s",
                    check_name, arg_name, message)
            else:
                logger.exception(
                    "Error calling '%s': %s", check_name, message)
        return response
    return wrapped
python
{ "resource": "" }
q270152
token_required
test
def token_required(view_func):
    """
    Decorator which ensures that one of the WATCHMAN_TOKENS is provided if
    set.

    WATCHMAN_TOKEN_NAME can also be set if the token GET parameter must be
    customized.
    """
    def _parse_auth_header(auth_header):
        """
        Parse the `Authorization` header.

        Expected format: `WATCHMAN-TOKEN Token="ABC123"`
        """
        # TODO: Figure out full set of allowed characters
        # http://stackoverflow.com/questions/19028068/illegal-characters-in-http-headers
        # https://www.w3.org/Protocols/rfc2616/rfc2616-sec2.html#sec2.2
        # https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2
        # Bug fix: raw string literal -- '\w' in a plain string is an
        # invalid escape sequence (DeprecationWarning, an error in future
        # Python versions).
        reg = re.compile(r'(\w+)[=] ?"?([\w-]+)"?')
        header_dict = dict(reg.findall(auth_header))
        return header_dict['Token']

    def _get_passed_token(request):
        """
        Try to get the passed token, starting with the header and falling
        back to the `GET` param.
        """
        try:
            auth_header = request.META['HTTP_AUTHORIZATION']
            token = _parse_auth_header(auth_header)
        except KeyError:
            token = request.GET.get(settings.WATCHMAN_TOKEN_NAME)
        return token

    def _validate_token(request):
        if settings.WATCHMAN_TOKENS:
            watchman_tokens = settings.WATCHMAN_TOKENS.split(',')
        elif settings.WATCHMAN_TOKEN:
            watchman_tokens = [settings.WATCHMAN_TOKEN, ]
        else:
            # No tokens configured: the endpoint is open.
            return True
        return _get_passed_token(request) in watchman_tokens

    @csrf_exempt
    @wraps(view_func)
    def _wrapped_view(request, *args, **kwargs):
        if _validate_token(request):
            return view_func(request, *args, **kwargs)
        return HttpResponseForbidden()
    return _wrapped_view
python
{ "resource": "" }
q270153
set_hosts
test
def set_hosts(hosts, use_ssl=False, ssl_cert_path=None):
    """
    Sets the Elasticsearch hosts to use

    Args:
        hosts (str): A single hostname or URL, or list of hostnames or URLs
        use_ssl (bool): Use a HTTPS connection to the server
        ssl_cert_path (str): Path to the certificate chain
    """
    if type(hosts) != list:
        hosts = [hosts]
    conn_params = {"hosts": hosts, "timeout": 20}
    if use_ssl:
        conn_params["use_ssl"] = True
        if ssl_cert_path:
            conn_params["verify_certs"] = True
            conn_params["ca_certs"] = ssl_cert_path
        else:
            # No CA bundle given: still use TLS, but skip verification.
            conn_params["verify_certs"] = False
    connections.create_connection(**conn_params)
python
{ "resource": "" }
q270154
create_indexes
test
def create_indexes(names, settings=None):
    """
    Create Elasticsearch indexes

    Args:
        names (list): A list of index names
        settings (dict): Index settings
    """
    for name in names:
        index = Index(name)
        try:
            if index.exists():
                continue
            logger.debug("Creating Elasticsearch index: {0}".format(name))
            if settings is None:
                index.settings(number_of_shards=1, number_of_replicas=1)
            else:
                index.settings(**settings)
            index.create()
        except Exception as e:
            raise ElasticsearchError(
                "Elasticsearch error: {0}".format(e.__str__()))
python
{ "resource": "" }
q270155
migrate_indexes
test
def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):
    """
    Updates index mappings

    Args:
        aggregate_indexes (list): A list of aggregate index names
        forensic_indexes (list): A list of forensic index names
    """
    version = 2
    if aggregate_indexes is None:
        aggregate_indexes = []
    if forensic_indexes is None:
        forensic_indexes = []
    for aggregate_index_name in aggregate_indexes:
        if not Index(aggregate_index_name).exists():
            continue
        aggregate_index = Index(aggregate_index_name)
        doc = "doc"
        fo_field = "published_policy.fo"
        fo = "fo"
        field_mapping = aggregate_index.get_field_mapping(fields=[fo_field])
        # The response is keyed by the concrete index name.
        field_mapping = field_mapping[list(field_mapping.keys())[0]]["mappings"]
        if doc not in field_mapping:
            continue
        fo_type = field_mapping[doc][fo_field]["mapping"][fo]["type"]
        if fo_type != "long":
            continue
        # v1 stored published_policy.fo as a long; v2 uses text + keyword.
        new_index_name = "{0}-v{1}".format(aggregate_index_name, version)
        body = {
            "properties": {
                "published_policy.fo": {
                    "type": "text",
                    "fields": {
                        "keyword": {"type": "keyword", "ignore_above": 256}
                    },
                }
            }
        }
        Index(new_index_name).create()
        Index(new_index_name).put_mapping(doc_type=doc, body=body)
        reindex(connections.get_connection(), aggregate_index_name,
                new_index_name)
        Index(aggregate_index_name).delete()
    for forensic_index in forensic_indexes:
        # No forensic index migrations are required yet.
        pass
python
{ "resource": "" }
q270156
KafkaClient.strip_metadata
test
def strip_metadata(report):
    """
    Duplicates org_name, org_email and report_id into the JSON root and
    removes the report_metadata key to bring it more inline with Elastic
    output.
    """
    meta = report['report_metadata']
    report['org_name'] = meta['org_name']
    report['org_email'] = meta['org_email']
    report['report_id'] = meta['report_id']
    del report['report_metadata']
    return report
python
{ "resource": "" }
q270157
KafkaClient.save_aggregate_reports_to_kafka
test
def save_aggregate_reports_to_kafka(self, aggregate_reports, aggregate_topic):
    """
    Saves aggregate DMARC reports to Kafka

    Args:
        aggregate_reports (list): A list of aggregate report dictionaries
            to save to Kafka
        aggregate_topic (str): The name of the Kafka topic
    """
    if type(aggregate_reports) in (dict, OrderedDict):
        aggregate_reports = [aggregate_reports]
    if not aggregate_reports:
        return
    for report in aggregate_reports:
        report['date_range'] = self.generate_daterange(report)
        report = self.strip_metadata(report)
        for record_slice in report['records']:
            # Flatten report-level fields into every record slice.
            record_slice['date_range'] = report['date_range']
            record_slice['org_name'] = report['org_name']
            record_slice['org_email'] = report['org_email']
            record_slice['policy_published'] = report['policy_published']
            record_slice['report_id'] = report['report_id']
            logger.debug("Sending slice.")
            try:
                logger.debug("Saving aggregate report to Kafka")
                self.producer.send(aggregate_topic, record_slice)
            except UnknownTopicOrPartitionError:
                raise KafkaError(
                    "Kafka error: Unknown topic or partition on broker")
            except Exception as e:
                raise KafkaError(
                    "Kafka error: {0}".format(e.__str__()))
        try:
            self.producer.flush()
        except Exception as e:
            raise KafkaError(
                "Kafka error: {0}".format(e.__str__()))
python
{ "resource": "" }
q270158
extract_xml
test
def extract_xml(input_):
    """
    Extracts xml from a zip or gzip file at the given path, file-like
    object, or bytes.

    Args:
        input_: A path to a file, a file like object, or bytes

    Returns:
        str: The extracted XML
    """
    if type(input_) == str:
        stream = open(input_, "rb")
    elif type(input_) == bytes:
        stream = BytesIO(input_)
    else:
        stream = input_
    try:
        # Sniff the file type from its magic bytes.
        magic = stream.read(6)
        stream.seek(0)
        if magic.startswith(MAGIC_ZIP):
            archive = zipfile.ZipFile(stream)
            xml_text = archive.open(archive.namelist()[0]).read().decode()
        elif magic.startswith(MAGIC_GZIP):
            xml_text = GzipFile(fileobj=stream).read().decode()
        elif magic.startswith(MAGIC_XML):
            xml_text = stream.read().decode()
        else:
            stream.close()
            raise InvalidAggregateReport("Not a valid zip, gzip, or xml file")
        stream.close()
    except UnicodeDecodeError:
        raise InvalidAggregateReport("File objects must be opened in binary "
                                     "(rb) mode")
    except Exception as error:
        raise InvalidAggregateReport(
            "Invalid archive file: {0}".format(error.__str__()))
    return xml_text
python
{ "resource": "" }
q270159
parse_aggregate_report_file
test
def parse_aggregate_report_file(_input, nameservers=None, dns_timeout=2.0,
                                parallel=False):
    """Parses a file at the given path, a file-like object, or bytes as an
    aggregate DMARC report

    Args:
        _input: A path to a file, a file like object, or bytes
        nameservers (list): A list of one or more nameservers to use
            (Cloudflare's public DNS resolvers by default)
        dns_timeout (float): Sets the DNS timeout in seconds
        parallel (bool): Parallel processing

    Returns:
        OrderedDict: The parsed DMARC aggregate report
    """
    xml_content = extract_xml(_input)
    return parse_aggregate_report_xml(
        xml_content,
        nameservers=nameservers,
        timeout=dns_timeout,
        parallel=parallel,
    )
python
{ "resource": "" }
q270160
parsed_forensic_reports_to_csv
test
def parsed_forensic_reports_to_csv(reports):
    """
    Converts one or more parsed forensic reports to flat CSV format,
    including headers

    Args:
        reports: A parsed forensic report or list of parsed forensic reports

    Returns:
        str: Parsed forensic report data in flat CSV format, including
        headers
    """
    fields = ["feedback_type", "user_agent", "version",
              "original_envelope_id", "original_mail_from",
              "original_rcpt_to", "arrival_date", "arrival_date_utc",
              "subject", "message_id", "authentication_results",
              "dkim_domain", "source_ip_address", "source_country",
              "source_reverse_dns", "source_base_domain", "delivery_result",
              "auth_failure", "reported_domain", "authentication_mechanisms",
              "sample_headers_only"]
    if type(reports) == OrderedDict:
        reports = [reports]
    out = StringIO()
    writer = DictWriter(out, fieldnames=fields)
    writer.writeheader()
    for report in reports:
        row = report.copy()
        # Flatten nested structures into the flat CSV columns.
        source = row.pop("source")
        row["source_ip_address"] = source["ip_address"]
        row["source_reverse_dns"] = source["reverse_dns"]
        row["source_base_domain"] = source["base_domain"]
        row["source_country"] = source["country"]
        row["subject"] = report["parsed_sample"]["subject"]
        row["auth_failure"] = ",".join(report["auth_failure"])
        row["authentication_mechanisms"] = ",".join(
            report["authentication_mechanisms"])
        del row["sample"]
        del row["parsed_sample"]
        writer.writerow(row)
    return out.getvalue()
python
{ "resource": "" }
q270161
parse_report_file
test
def parse_report_file(input_, nameservers=None, dns_timeout=2.0,
                      strip_attachment_payloads=False, parallel=False):
    """Parses a DMARC aggregate or forensic file at the given path, a
    file-like object, or bytes

    Args:
        input_: A path to a file, a file like object, or bytes
        nameservers (list): A list of one or more nameservers to use
            (Cloudflare's public DNS resolvers by default)
        dns_timeout (float): Sets the DNS timeout in seconds
        strip_attachment_payloads (bool): Remove attachment payloads from
            forensic report results
        parallel (bool): Parallel processing

    Returns:
        OrderedDict: The parsed DMARC report
    """
    opened_here = False
    if type(input_) == str:
        file_object = open(input_, "rb")
        opened_here = True
    elif type(input_) == bytes:
        file_object = BytesIO(input_)
    else:
        file_object = input_
    content = file_object.read()
    if opened_here:
        # Bug fix: previously a handle opened from a path was never closed.
        file_object.close()
    try:
        report = parse_aggregate_report_file(content,
                                             nameservers=nameservers,
                                             dns_timeout=dns_timeout,
                                             parallel=parallel)
        results = OrderedDict([("report_type", "aggregate"),
                               ("report", report)])
    except InvalidAggregateReport:
        # Not an aggregate report; try parsing it as a forensic report email.
        try:
            sa = strip_attachment_payloads
            results = parse_report_email(content,
                                         nameservers=nameservers,
                                         dns_timeout=dns_timeout,
                                         strip_attachment_payloads=sa,
                                         parallel=parallel)
        except InvalidDMARCReport:
            raise InvalidDMARCReport("Not a valid aggregate or forensic "
                                     "report")
    return results
python
{ "resource": "" }
q270162
get_imap_capabilities
test
def get_imap_capabilities(server):
    """
    Returns a list of an IMAP server's capabilities

    Args:
        server (imapclient.IMAPClient): An instance of imapclient.IMAPClient

    Returns (list): A list of capabilities
    """
    capabilities = []
    for capability in server.capabilities():
        # Bug fix: capabilities() returns bytes; decode them instead of
        # round-tripping through str()/repr() and stripping the "b'...'"
        # wrapper, which corrupted values containing quote characters.
        if isinstance(capability, bytes):
            capability = capability.decode("utf-8", errors="replace")
        else:
            capability = str(capability)
        capabilities.append(capability)
    logger.debug("IMAP server supports: {0}".format(capabilities))
    return capabilities
python
{ "resource": "" }
q270163
save_output
test
def save_output(results, output_directory="output"):
    """
    Save report data in the given directory

    Args:
        results (OrderedDict): Parsing results
        output_directory: The path to the directory to save in
    """
    aggregate_reports = results["aggregate_reports"]
    forensic_reports = results["forensic_reports"]

    if os.path.exists(output_directory):
        if not os.path.isdir(output_directory):
            raise ValueError("{0} is not a directory".format(output_directory))
    else:
        os.makedirs(output_directory)

    def _write(filename, content):
        # All outputs use UTF-8 and Unix newlines.
        path = os.path.join(output_directory, filename)
        with open(path, "w", newline="\n", encoding="utf-8") as fh:
            fh.write(content)

    _write("aggregate.json",
           json.dumps(aggregate_reports, ensure_ascii=False, indent=2))
    _write("aggregate.csv", parsed_aggregate_reports_to_csv(aggregate_reports))
    _write("forensic.json",
           json.dumps(forensic_reports, ensure_ascii=False, indent=2))
    _write("forensic.csv", parsed_forensic_reports_to_csv(forensic_reports))

    samples_directory = os.path.join(output_directory, "samples")
    if not os.path.exists(samples_directory):
        os.makedirs(samples_directory)

    sample_filenames = []
    for forensic_report in forensic_reports:
        sample = forensic_report["sample"]
        message_count = 0
        subject = forensic_report["parsed_sample"]["filename_safe_subject"]
        filename = subject
        # De-duplicate names: "subject", "subject (1)", "subject (2)", ...
        while filename in sample_filenames:
            message_count += 1
            filename = "{0} ({1})".format(subject, message_count)
        sample_filenames.append(filename)
        _write(os.path.join("samples", "{0}.eml".format(filename)), sample)
python
{ "resource": "" }
q270164
get_report_zip
test
def get_report_zip(results):
    """
    Creates a zip file of parsed report output

    Args:
        results (OrderedDict): The parsed results

    Returns:
        bytes: zip file bytes
    """
    def add_subdir(root_path, subdir):
        # Recursively add the contents of root_path/subdir to the archive.
        subdir_path = os.path.join(root_path, subdir)
        for subdir_root, subdir_dirs, subdir_files in os.walk(subdir_path):
            for subdir_file in subdir_files:
                subdir_file_path = os.path.join(root_path, subdir,
                                                subdir_file)
                if os.path.isfile(subdir_file_path):
                    # NOTE(review): relpath(dir, file_path) looks inverted
                    # (a path relative to a *file*); preserved as-is to keep
                    # the archive layout unchanged -- verify.
                    rel_path = os.path.relpath(subdir_root, subdir_file_path)
                    subdir_arc_name = os.path.join(rel_path, subdir_file)
                    zip_file.write(subdir_file_path, subdir_arc_name)
            for nested_subdir in subdir_dirs:
                add_subdir(subdir_path, nested_subdir)

    storage = BytesIO()
    tmp_dir = tempfile.mkdtemp()
    try:
        save_output(results, tmp_dir)
        with zipfile.ZipFile(storage, 'w', zipfile.ZIP_DEFLATED) as zip_file:
            for root, dirs, files in os.walk(tmp_dir):
                for file in files:
                    file_path = os.path.join(root, file)
                    if os.path.isfile(file_path):
                        arcname = os.path.join(
                            os.path.relpath(root, tmp_dir), file)
                        zip_file.write(file_path, arcname)
                for directory in dirs:
                    dir_path = os.path.join(root, directory)
                    if os.path.isdir(dir_path):
                        zip_file.write(dir_path, directory)
                        add_subdir(root, directory)
    finally:
        shutil.rmtree(tmp_dir)
    return storage.getvalue()
python
{ "resource": "" }
q270165
email_results
test
def email_results(results, host, mail_from, mail_to, port=0, ssl=False,
                  user=None, password=None, subject=None,
                  attachment_filename=None, message=None, ssl_context=None):
    """
    Emails parsing results as a zip file

    Args:
        results (OrderedDict): Parsing results
        host: Mail server hostname or IP address
        mail_from: The value of the message from header
        mail_to: A list of addresses to mail to
        port (int): Port to use
        ssl (bool): Require a SSL connection from the start
        user: An optional username
        password: An optional password
        subject: Overrides the default message subject
        attachment_filename: Override the default attachment filename
        message: Override the default plain text body
        ssl_context: SSL context options
    """
    logging.debug("Emailing report to: {0}".format(",".join(mail_to)))
    date_string = datetime.now().strftime("%Y-%m-%d")
    if attachment_filename:
        filename = attachment_filename
        if not filename.lower().endswith(".zip"):
            filename += ".zip"
    else:
        filename = "DMARC-{0}.zip".format(date_string)
    assert isinstance(mail_to, list)

    msg = MIMEMultipart()
    msg['From'] = mail_from
    msg['To'] = ", ".join(mail_to)
    msg['Date'] = email.utils.formatdate(localtime=True)
    msg['Subject'] = subject or "DMARC results for {0}".format(date_string)
    msg.attach(MIMEText(message or "Please see the attached zip file\n"))

    zip_bytes = get_report_zip(results)
    part = MIMEApplication(zip_bytes, Name=filename)
    part['Content-Disposition'] = 'attachment; filename="{0}"'.format(filename)
    msg.attach(part)

    try:
        if ssl_context is None:
            ssl_context = create_default_context()
        if ssl:
            server = smtplib.SMTP_SSL(host, port=port, context=ssl_context)
            server.connect(host, port)
            server.ehlo_or_helo_if_needed()
        else:
            server = smtplib.SMTP(host, port=port)
            server.connect(host, port)
            server.ehlo_or_helo_if_needed()
            # Opportunistic TLS: upgrade when the server offers STARTTLS.
            if server.has_extn("starttls"):
                server.starttls(context=ssl_context)
                server.ehlo()
            else:
                logger.warning("SMTP server does not support STARTTLS. "
                               "Proceeding in plain text!")
        if user and password:
            server.login(user, password)
        server.sendmail(mail_from, mail_to, msg.as_string())
    except smtplib.SMTPException as error:
        error = error.__str__().lstrip("b'").rstrip("'").rstrip(".")
        raise SMTPError(error)
    except socket.gaierror:
        raise SMTPError("DNS resolution failed")
    except ConnectionRefusedError:
        raise SMTPError("Connection refused")
    except ConnectionResetError:
        raise SMTPError("Connection reset")
    except ConnectionAbortedError:
        raise SMTPError("Connection aborted")
    except TimeoutError:
        raise SMTPError("Connection timed out")
    except SSLError as error:
        raise SMTPError("SSL error: {0}".format(error.__str__()))
    except CertificateError as error:
        raise SMTPError("Certificate error: {0}".format(error.__str__()))
python
{ "resource": "" }
q270166
HECClient.save_aggregate_reports_to_splunk
test
def save_aggregate_reports_to_splunk(self, aggregate_reports):
    """
    Saves aggregate DMARC reports to Splunk

    Args:
        aggregate_reports: A list of aggregate report dictionaries
            to save in Splunk
    """
    logger.debug("Saving aggregate reports to Splunk")
    if type(aggregate_reports) == dict:
        aggregate_reports = [aggregate_reports]
    if not aggregate_reports:
        return
    data = self._common_data.copy()
    json_str = ""
    for report in aggregate_reports:
        for record in report["records"]:
            # Flatten each record into a single Splunk event.
            flat = dict()
            for metadata in report["report_metadata"]:
                flat[metadata] = report["report_metadata"][metadata]
            flat["published_policy"] = report["policy_published"]
            source = record["source"]
            flat["source_ip_address"] = source["ip_address"]
            flat["source_country"] = source["country"]
            flat["source_reverse_dns"] = source["reverse_dns"]
            flat["source_base_domain"] = source["base_domain"]
            flat["message_count"] = record["count"]
            flat["disposition"] = record["policy_evaluated"]["disposition"]
            alignment = record["alignment"]
            flat["spf_aligned"] = alignment["spf"]
            flat["dkim_aligned"] = alignment["dkim"]
            flat["passed_dmarc"] = alignment["dmarc"]
            flat["header_from"] = record["identifiers"]["header_from"]
            flat["envelope_from"] = record["identifiers"]["envelope_from"]
            if "dkim" in record["auth_results"]:
                flat["dkim_results"] = record["auth_results"]["dkim"]
            if "spf" in record["auth_results"]:
                flat["spf_results"] = record["auth_results"]["spf"]
            data["sourcetype"] = "dmarc:aggregate"
            data["time"] = human_timestamp_to_timestamp(flat["begin_date"])
            data["event"] = flat.copy()
            # HEC accepts newline-delimited JSON events in one POST.
            json_str += "{0}\n".format(json.dumps(data))
    if not self.session.verify:
        logger.debug("Skipping certificate verification for Splunk HEC")
    try:
        response = self.session.post(self.url, data=json_str,
                                     timeout=self.timeout)
        response = response.json()
    except Exception as e:
        raise SplunkError(e.__str__())
    if response["code"] != 0:
        raise SplunkError(response["text"])
python
{ "resource": "" }
q270167
HECClient.save_forensic_reports_to_splunk
test
def save_forensic_reports_to_splunk(self, forensic_reports):
    """
    Saves forensic DMARC reports to Splunk

    Args:
        forensic_reports (list): A list of forensic report dictionaries
            to save in Splunk
    """
    logger.debug("Saving forensic reports to Splunk")
    if type(forensic_reports) == dict:
        forensic_reports = [forensic_reports]
    if not forensic_reports:
        return
    json_str = ""
    for report in forensic_reports:
        data = self._common_data.copy()
        data["sourcetype"] = "dmarc:forensic"
        data["time"] = human_timestamp_to_timestamp(
            report["arrival_date_utc"])
        data["event"] = report.copy()
        # HEC accepts newline-delimited JSON events in one POST.
        json_str += "{0}\n".format(json.dumps(data))
    if not self.session.verify:
        logger.debug("Skipping certificate verification for Splunk HEC")
    try:
        response = self.session.post(self.url, data=json_str,
                                     timeout=self.timeout)
        response = response.json()
    except Exception as e:
        raise SplunkError(e.__str__())
    if response["code"] != 0:
        raise SplunkError(response["text"])
python
{ "resource": "" }
q270168
decode_base64
test
def decode_base64(data):
    """
    Decodes a base64 string, with padding being optional

    Args:
        data: A base64 encoded string (``str`` or ASCII ``bytes``)

    Returns:
        bytes: The decoded bytes
    """
    # Generalization: accept bytes too; the original crashed on bytes input
    # because bytes(data, encoding="ascii") requires a str.
    if isinstance(data, str):
        data = data.encode("ascii")
    missing_padding = len(data) % 4
    if missing_padding:
        # Restore optional '=' padding so b64decode accepts the input.
        data += b'=' * (4 - missing_padding)
    return base64.b64decode(data)
python
{ "resource": "" }
q270169
get_base_domain
test
def get_base_domain(domain, use_fresh_psl=False): """ Gets the base domain name for the given domain .. note:: Results are based on a list of public domain suffixes at https://publicsuffix.org/list/public_suffix_list.dat. Args: domain (str): A domain or subdomain use_fresh_psl (bool): Download a fresh Public Suffix List Returns: str: The base domain of the given domain """ psl_path = os.path.join(tempdir, "public_suffix_list.dat") def download_psl(): url = "https://publicsuffix.org/list/public_suffix_list.dat" # Use a browser-like user agent string to bypass some proxy blocks headers = {"User-Agent": USER_AGENT} fresh_psl = requests.get(url, headers=headers).text with open(psl_path, "w", encoding="utf-8") as fresh_psl_file: fresh_psl_file.write(fresh_psl) if use_fresh_psl: if not os.path.exists(psl_path): download_psl() else: psl_age = datetime.now() - datetime.fromtimestamp( os.stat(psl_path).st_mtime) if psl_age > timedelta(hours=24): try: download_psl() except Exception as error: logger.warning( "Failed to download an updated PSL {0}".format(error)) with open(psl_path, encoding="utf-8") as psl_file: psl = publicsuffix2.PublicSuffixList(psl_file) return psl.get_public_suffix(domain) else: return publicsuffix2.get_public_suffix(domain)
python
{ "resource": "" }
q270170
get_reverse_dns
test
def get_reverse_dns(ip_address, cache=None, nameservers=None, timeout=2.0):
    """
    Resolves an IP address to a hostname using a reverse DNS query

    Args:
        ip_address (str): The IP address to resolve
        cache (ExpiringDict): Cache storage
        nameservers (list): A list of one or more nameservers to use
            (Cloudflare's public DNS resolvers by default)
        timeout (float): Sets the DNS query timeout in seconds

    Returns:
        str: The reverse DNS hostname (if any)
    """
    try:
        ptr_name = dns.reversename.from_address(ip_address)
        answers = query_dns(ptr_name, "PTR", cache=cache,
                            nameservers=nameservers, timeout=timeout)
        return answers[0]
    except dns.exception.DNSException:
        # No PTR record (or lookup failure): report no hostname.
        return None
python
{ "resource": "" }
q270171
human_timestamp_to_datetime
test
def human_timestamp_to_datetime(human_timestamp, to_utc=False):
    """
    Converts a human-readable timestamp into a Python ``DateTime`` object

    Args:
        human_timestamp (str): A timestamp string
        to_utc (bool): Convert the timestamp to UTC

    Returns:
        DateTime: The converted timestamp
    """
    settings = {"TO_TIMEZONE": "UTC"} if to_utc else {}
    return dateparser.parse(human_timestamp, settings=settings)
python
{ "resource": "" }
q270172
get_ip_address_country
test
def get_ip_address_country(ip_address, parallel=False):
    """
    Uses the MaxMind Geolite2 Country database to return the ISO code for
    the country associated with the given IPv4 or IPv6 address

    Args:
        ip_address (str): The IP address to query for
        parallel (bool): Parallel processing

    Returns:
        str: An ISO country code associated with the given IP address
    """
    def download_country_database(location="GeoLite2-Country.mmdb"):
        """Downloads the MaxMind Geolite2 Country database

        Args:
            location (str): Local location for the database file
        """
        if parallel:
            logging.warning("Cannot download GeoIP database in parallel mode")
            return
        url = "https://geolite.maxmind.com/download/geoip/database/" \
              "GeoLite2-Country.tar.gz"
        # Use a browser-like user agent string to bypass some proxy blocks
        headers = {"User-Agent": USER_AGENT}
        original_filename = "GeoLite2-Country.mmdb"
        try:
            response = requests.get(url, headers=headers)
            response.raise_for_status()
            tar_file = tarfile.open(fileobj=BytesIO(response.content),
                                    mode="r:gz")
            tar_dir = tar_file.getnames()[0]
            tar_path = "{0}/{1}".format(tar_dir, original_filename)
            tar_file.extract(tar_path)
            shutil.move(tar_path, location)
            shutil.rmtree(tar_dir)
        except Exception as e:
            logger.warning("Error downloading {0}: {1}".format(
                url, e.__str__()))

    system_paths = [
        "GeoLite2-Country.mmdb",
        "/usr/local/share/GeoIP/GeoLite2-Country.mmdb",
        "/usr/share/GeoIP/GeoLite2-Country.mmdb",
        "/var/lib/GeoIP/GeoLite2-Country.mmdb",
        "/var/local/lib/GeoIP/GeoLite2-Country.mmdb",
        "C:\\GeoIP\\GeoLite2-Country.mmdb",
    ]
    db_path = None
    for system_path in system_paths:
        if os.path.exists(system_path):
            db_path = system_path
            break

    if db_path is None:
        # Fall back to a copy cached in the temp directory.
        db_path = os.path.join(tempdir, "GeoLite2-Country.mmdb")
        if not os.path.exists(db_path):
            download_country_database(db_path)
            if not os.path.exists(db_path):
                return None
        else:
            db_age = datetime.now() - datetime.fromtimestamp(
                os.stat(db_path).st_mtime)
            if db_age > timedelta(days=7):
                # NOTE(review): the refresh downloads to the default CWD
                # location rather than db_path; preserved from the original
                # -- likely should be download_country_database(db_path).
                download_country_database()
        # (The garbled no-op "db_path = db_path" from the extracted source
        # has been removed.)

    db_reader = geoip2.database.Reader(db_path)
    country = None
    try:
        country = db_reader.country(ip_address).country.iso_code
    except geoip2.errors.AddressNotFoundError:
        pass
    return country
python
{ "resource": "" }
q270173
get_ip_address_info
test
def get_ip_address_info(ip_address, cache=None, nameservers=None,
                        timeout=2.0, parallel=False):
    """
    Returns reverse DNS and country information for the given IP address

    Args:
        ip_address (str): The IP address to check
        cache (ExpiringDict): Cache storage
        nameservers (list): A list of one or more nameservers to use
            (Cloudflare's public DNS resolvers by default)
        timeout (float): Sets the DNS timeout in seconds
        parallel (bool): parallel processing

    Returns:
        OrderedDict: ``ip_address``, ``country``, ``reverse_dns``,
        ``base_domain``
    """
    ip_address = ip_address.lower()
    if cache:
        info = cache.get(ip_address, None)
        if info:
            return info
    info = OrderedDict()
    info["ip_address"] = ip_address
    reverse_dns = get_reverse_dns(ip_address, nameservers=nameservers,
                                  timeout=timeout)
    country = get_ip_address_country(ip_address, parallel=parallel)
    info["country"] = country
    info["reverse_dns"] = reverse_dns
    info["base_domain"] = None
    if reverse_dns is not None:
        info["base_domain"] = get_base_domain(reverse_dns)
    # Bug fix: the cache was previously only ever read, never written, so
    # every lookup missed and re-queried DNS/GeoIP.
    if cache is not None:
        cache[ip_address] = info
    return info
python
{ "resource": "" }
q270174
convert_outlook_msg
test
def convert_outlook_msg(msg_bytes):
    """
    Uses the ``msgconvert`` Perl utility to convert an Outlook MS file to
    standard RFC 822 format

    Args:
        msg_bytes (bytes): the content of the .msg file

    Returns:
        A RFC 822 string
    """
    if not is_outlook_msg(msg_bytes):
        raise ValueError("The supplied bytes are not an Outlook MSG file")
    orig_dir = os.getcwd()
    tmp_dir = tempfile.mkdtemp()
    # msgconvert writes its output next to the input, so work in a temp dir.
    os.chdir(tmp_dir)
    with open("sample.msg", "wb") as msg_file:
        msg_file.write(msg_bytes)
    try:
        subprocess.check_call(["msgconvert", "sample.msg"],
                              stdout=null_file, stderr=null_file)
        with open("sample.eml", "rb") as eml_file:
            rfc822 = eml_file.read()
    except FileNotFoundError:
        raise EmailParserError(
            "Failed to convert Outlook MSG: msgconvert utility not found")
    finally:
        os.chdir(orig_dir)
        shutil.rmtree(tmp_dir)
    return rfc822
python
{ "resource": "" }
q270175
_str_to_list
test
def _str_to_list(s):
    """Converts a comma separated string to a list"""
    # Split on commas and drop leading whitespace left by ", " separators.
    return [item.lstrip() for item in s.split(",")]
python
{ "resource": "" }
q270176
cli_parse
test
def cli_parse(file_path, sa, nameservers, dns_timeout, parallel=False):
    """Separated this function for multiprocessing

    Parses one report file and returns ``(result_or_error, file_path)`` so
    the caller can tell which input produced which outcome.

    Args:
        file_path (str): Path of the report file to parse
        sa (bool): Strip attachment payloads
        nameservers (list): Nameservers to use for DNS lookups
        dns_timeout (float): DNS timeout in seconds
        parallel (bool): Enable parallel processing
    """
    try:
        file_results = parse_report_file(file_path,
                                         nameservers=nameservers,
                                         dns_timeout=dns_timeout,
                                         strip_attachment_payloads=sa,
                                         parallel=parallel)
    except ParserError as error:
        # Return the error as data instead of raising, so one bad file
        # does not abort a whole multiprocessing batch.
        return error, file_path
    finally:
        # Shared progress counter across worker processes; the lock makes
        # the increment atomic.
        global counter
        with counter.get_lock():
            counter.value += 1
    return file_results, file_path
python
{ "resource": "" }
q270177
Client.drain
test
def drain(self, sid=None):
    """
    Drain will put a connection into a drain state. All subscriptions
    will immediately be put into a drain state. Upon completion, the
    publishers will be drained and can not publish any additional
    messages. Upon draining of the publishers, the connection will be
    closed. Use the `closed_cb' option to know when the connection has
    moved from draining to closed.

    If a sid is passed, just the subscription with that sid will be
    drained without closing the connection.
    """
    if self.is_draining:
        return
    if self.is_closed:
        raise ErrConnectionClosed
    if self.is_connecting or self.is_reconnecting:
        raise ErrConnectionReconnecting

    if sid is not None:
        # Drain only the one subscription; the connection stays open.
        return self._drain_sub(sid)

    # Start draining the subscriptions
    self._status = Client.DRAINING_SUBS
    drain_tasks = []
    for ssid, sub in self._subs.items():
        task = self._drain_sub(ssid)
        drain_tasks.append(task)

    drain_is_done = asyncio.gather(*drain_tasks)
    try:
        # Bound the whole subscription drain by the configured timeout.
        yield from asyncio.wait_for(drain_is_done, self.options["drain_timeout"])
    except asyncio.TimeoutError:
        # Consume the gathered exception (avoids "never retrieved"
        # warnings) before cancelling the outstanding drains.
        drain_is_done.exception()
        drain_is_done.cancel()
        if self._error_cb is not None:
            yield from self._error_cb(ErrDrainTimeout)
    except asyncio.CancelledError:
        pass
    finally:
        # Whether or not the subs drained in time, proceed to drain the
        # publishers and then close the connection.
        self._status = Client.DRAINING_PUBS
        yield from self.flush()
        yield from self._close(Client.CLOSED)
python
{ "resource": "" }
q270178
Client.publish
test
def publish(self, subject, payload):
    """
    Publish ``payload`` on ``subject`` by sending a PUB command
    to the server.

      ->> PUB hello 5
      ->> MSG_PAYLOAD: world
      <<- MSG hello 2 5
    """
    # Reject publishes on connections that can no longer accept them.
    if self.is_closed:
        raise ErrConnectionClosed
    if self.is_draining_pubs:
        raise ErrConnectionDraining

    size = len(payload)
    if size > self._max_payload:
        raise ErrMaxPayload

    # A plain publish carries no reply subject.
    yield from self._publish(subject, _EMPTY_, payload, size)
python
{ "resource": "" }
q270179
Client.publish_request
test
def publish_request(self, subject, reply, payload):
    """
    Publishes a message tagging it with a reply subscription
    which can be used by those receiving the message to respond.

      ->> PUB hello _INBOX.2007314fe0fcb2cdc2a2914c1 5
      ->> MSG_PAYLOAD: world
      <<- MSG hello 2 _INBOX.2007314fe0fcb2cdc2a2914c1 5
    """
    # Reject publishes on connections that can no longer accept them.
    if self.is_closed:
        raise ErrConnectionClosed
    if self.is_draining_pubs:
        raise ErrConnectionDraining

    size = len(payload)
    if size > self._max_payload:
        raise ErrMaxPayload

    # The reply subject travels with the message so receivers can respond.
    yield from self._publish(subject, reply.encode(), payload, size)
python
{ "resource": "" }
q270180
Client._publish
test
def _publish(self, subject, reply, payload, payload_size):
    """
    Sends PUB command to the NATS server.

    Builds the wire-format bytes
    ``PUB <subject> <reply> <#bytes>\\r\\n<payload>\\r\\n``,
    updates outbound stats, and queues the command for flushing.
    """
    if subject == "":
        # Avoid sending messages with empty replies.
        raise ErrBadSubject

    payload_size_bytes = ("%d" % payload_size).encode()
    pub_cmd = b''.join([PUB_OP, _SPC_, subject.encode(), _SPC_, reply,
                        _SPC_, payload_size_bytes, _CRLF_, payload, _CRLF_])
    self.stats['out_msgs'] += 1
    self.stats['out_bytes'] += payload_size
    yield from self._send_command(pub_cmd)
    if self._flush_queue.empty():
        # Kick the flusher coroutine so the command reaches the socket.
        yield from self._flush_pending()
python
{ "resource": "" }
q270181
Client.subscribe_async
test
def subscribe_async(self, subject, **kwargs):
    """
    Sets the subcription to use a task per message to be processed.

    ..deprecated:: 7.0
      Will be removed 9.0.
    """
    # Force async delivery, then delegate to the regular subscribe.
    kwargs["is_async"] = True
    return (yield from self.subscribe(subject, **kwargs))
python
{ "resource": "" }
q270182
Client.unsubscribe
test
def unsubscribe(self, ssid, max_msgs=0):
    """
    Takes a subscription sequence id and removes the subscription
    from the client, optionally after receiving more than max_msgs.
    """
    if self.is_closed:
        raise ErrConnectionClosed
    if self.is_draining:
        raise ErrConnectionDraining

    # Drop the local subscription state first.
    self._remove_sub(ssid, max_msgs)

    # While reconnecting, UNSUB commands for all subs are replayed anyway,
    # so only tell the server when not in that state.
    if not self.is_reconnecting:
        yield from self.auto_unsubscribe(ssid, max_msgs)
python
{ "resource": "" }
q270183
Client.flush
test
def flush(self, timeout=60):
    """
    Sends a ping to the server expecting a pong back ensuring
    what we have written so far has made it to the server and
    also enabling measuring of roundtrip time.
    In case a pong is not returned within the allowed timeout,
    then it will raise ErrTimeout.
    """
    if timeout <= 0:
        raise ErrBadTimeout
    if self.is_closed:
        raise ErrConnectionClosed

    # One future per outstanding PING; resolved when the PONG arrives.
    pong_future = asyncio.Future(loop=self._loop)
    try:
        yield from self._send_ping(pong_future)
        yield from asyncio.wait_for(pong_future, timeout, loop=self._loop)
    except asyncio.TimeoutError:
        # Give up on this ping: nothing will resolve the future anymore.
        pong_future.cancel()
        raise ErrTimeout
python
{ "resource": "" }
q270184
Client._select_next_server
test
def _select_next_server(self):
    """
    Looks up in the server pool for an available server
    and attempts to connect.

    Rotates through the pool (pop from the front, append to the back),
    honoring ``max_reconnect_attempts`` and ``reconnect_time_wait``.
    On success it sets ``self._current_server`` and the I/O streams;
    on failure it records the error and tries the next candidate.
    """
    while True:
        if len(self._server_pool) == 0:
            self._current_server = None
            raise ErrNoServers

        now = time.monotonic()
        s = self._server_pool.pop(0)
        if self.options["max_reconnect_attempts"] > 0:
            if s.reconnects > self.options["max_reconnect_attempts"]:
                # Discard server since already tried to reconnect too many times
                continue

        # Not yet exceeded max_reconnect_attempts so can still use
        # this server in the future.
        self._server_pool.append(s)
        if s.last_attempt is not None and now < s.last_attempt + self.options["reconnect_time_wait"]:
            # Backoff connecting to server if we attempted recently.
            yield from asyncio.sleep(self.options["reconnect_time_wait"], loop=self._loop)
        try:
            s.last_attempt = time.monotonic()
            r, w = yield from asyncio.open_connection(
                s.uri.hostname,
                s.uri.port,
                loop=self._loop,
                limit=DEFAULT_BUFFER_SIZE)
            self._current_server = s

            # We keep a reference to the initial transport we used when
            # establishing the connection in case we later upgrade to TLS
            # after getting the first INFO message. This is in order to
            # prevent the GC closing the socket after we send CONNECT
            # and replace the transport.
            #
            # See https://github.com/nats-io/asyncio-nats/issues/43
            self._bare_io_reader = self._io_reader = r
            self._bare_io_writer = self._io_writer = w
            break
        except Exception as e:
            # Record the failure and move on to the next candidate.
            s.last_attempt = time.monotonic()
            s.reconnects += 1
            self._err = e
            if self._error_cb is not None:
                yield from self._error_cb(e)
            continue
python
{ "resource": "" }
q270185
Client._process_err
test
def _process_err(self, err_msg):
    """
    Processes the raw error message sent by the server
    and close connection with current server.
    """
    if STALE_CONNECTION in err_msg:
        # Stale connections are treated like I/O errors so the client
        # can attempt to reconnect.
        yield from self._process_op_err(ErrStaleConnection)
        return

    if AUTHORIZATION_VIOLATION in err_msg:
        self._err = ErrAuthorization
    else:
        # NOTE(review): indexing a bytes object with [0] yields an int on
        # Python 3 -- this assumes err_msg is a sequence whose first
        # element is bytes; confirm against the protocol parser.
        m = b'nats: ' + err_msg[0]
        self._err = NatsError(m.decode())

    # Only fire disconnect/close callbacks once the initial connect
    # handshake has completed.
    do_cbs = False
    if not self.is_connecting:
        do_cbs = True

    # FIXME: Some errors such as 'Invalid Subscription'
    # do not cause the server to close the connection.
    # For now we handle similar as other clients and close.
    self._loop.create_task(self._close(Client.CLOSED, do_cbs))
python
{ "resource": "" }
q270186
Client._process_op_err
test
def _process_op_err(self, e):
    """
    Process errors which occured while reading or parsing
    the protocol. If allow_reconnect is enabled it will
    try to switch the server to which it is currently connected
    otherwise it will disconnect.
    """
    if self.is_connecting or self.is_closed or self.is_reconnecting:
        # Already in a (re)connection transition; nothing more to do.
        return

    if self.options["allow_reconnect"] and self.is_connected:
        self._status = Client.RECONNECTING
        # Drop any partially parsed protocol state.
        self._ps.reset()

        if self._reconnection_task is not None and not self._reconnection_task.cancelled():
            # Cancel the previous task in case it may still be running.
            self._reconnection_task.cancel()

        self._reconnection_task = self._loop.create_task(self._attempt_reconnect())
    else:
        # Reconnects disabled (or not connected): tear down for good.
        self._process_disconnect()
        self._err = e
        yield from self._close(Client.CLOSED, True)
python
{ "resource": "" }
q270187
Client._connect_command
test
def _connect_command(self):
    '''
    Generates a JSON string with the params to be used
    when sending CONNECT to the server.

      ->> CONNECT {"lang": "python3"}
    '''
    options = {
        "verbose": self.options["verbose"],
        "pedantic": self.options["pedantic"],
        "lang": __lang__,
        "version": __version__,
        "protocol": PROTOCOL
    }
    if "auth_required" in self._server_info and self._server_info["auth_required"]:
        # Prefer explicit user/password, then an explicit token, then
        # whatever credentials were embedded in the server URI.
        if self.options["user"] is not None and self.options["password"] is not None:
            options["user"] = self.options["user"]
            options["pass"] = self.options["password"]
        elif self.options["token"] is not None:
            options["auth_token"] = self.options["token"]
        elif self._current_server.uri.password is None:
            # URI of the form nats://token@host -- the username slot
            # carries the token.
            options["auth_token"] = self._current_server.uri.username
        else:
            options["user"] = self._current_server.uri.username
            options["pass"] = self._current_server.uri.password
    if self.options["name"] is not None:
        options["name"] = self.options["name"]
    if self.options["no_echo"] is not None:
        options["echo"] = not self.options["no_echo"]

    connect_opts = json.dumps(options, sort_keys=True)
    return CONNECT_OP + _SPC_ + connect_opts.encode() + _CRLF_
python
{ "resource": "" }
q270188
Client._process_pong
test
def _process_pong(self):
    """
    Process PONG sent by server.
    """
    if not self._pongs:
        # Unsolicited PONG; nothing is waiting on it.
        return
    # Resolve the oldest pending ping waiter.
    waiter = self._pongs.pop(0)
    waiter.set_result(True)
    self._pongs_received += 1
    self._pings_outstanding -= 1
python
{ "resource": "" }
q270189
Client._process_msg
test
def _process_msg(self, sid, subject, reply, data):
    """
    Process MSG sent by server.

    Routes the message either to an old-style request future or to the
    subscription's pending queue, enforcing the per-subscription
    pending-bytes limit (slow consumer protection).
    """
    payload_size = len(data)
    self.stats['in_msgs'] += 1
    self.stats['in_bytes'] += payload_size

    sub = self._subs.get(sid)
    if sub is None:
        # Skip in case no subscription present.
        return

    sub.received += 1
    if sub.max_msgs > 0 and sub.received >= sub.max_msgs:
        # Enough messages so can throwaway subscription now.
        self._subs.pop(sid, None)

    msg = self._build_message(subject, reply, data)

    # Check if it is an old style request.
    if sub.future is not None:
        if sub.future.cancelled():
            # Already gave up, nothing to do.
            return
        sub.future.set_result(msg)
        return

    # Let subscription wait_for_msgs coroutine process the messages,
    # but in case sending to the subscription task would block,
    # then consider it to be an slow consumer and drop the message.
    try:
        sub.pending_size += payload_size
        if sub.pending_size >= sub.pending_bytes_limit:
            # Substract again the bytes since throwing away
            # the message so would not be pending data.
            sub.pending_size -= payload_size
            if self._error_cb is not None:
                yield from self._error_cb(
                    ErrSlowConsumer(subject=subject, sid=sid))
            return
        sub.pending_queue.put_nowait(msg)
    except asyncio.QueueFull:
        # Queue is bounded as well; treat overflow as a slow consumer.
        if self._error_cb is not None:
            yield from self._error_cb(
                ErrSlowConsumer(subject=subject, sid=sid))
python
{ "resource": "" }
q270190
Client._process_info
test
def _process_info(self, info):
    """
    Process INFO lines sent by the server to reconfigure client
    with latest updates from cluster to enable server discovery.
    """
    if 'connect_urls' in info:
        if info['connect_urls']:
            connect_urls = []
            for connect_url in info['connect_urls']:
                uri = urlparse("nats://%s" % connect_url)
                srv = Srv(uri)
                srv.discovered = True

                # Filter for any similar server in the server pool already.
                # NOTE: this only dedupes against the existing pool, not
                # within the announced batch itself.
                should_add = True
                for s in self._server_pool:
                    if uri.netloc == s.uri.netloc:
                        should_add = False
                if should_add:
                    connect_urls.append(srv)

            if self.options["dont_randomize"] is not True:
                # Shuffle discovered servers to spread reconnect load.
                shuffle(connect_urls)
            for srv in connect_urls:
                self._server_pool.append(srv)
python
{ "resource": "" }
q270191
Client._process_connect_init
test
def _process_connect_init(self):
    """
    Process INFO received from the server and CONNECT to the server
    with authentication. It is also responsible of setting up the
    reading and ping interval tasks from the client.
    """
    self._status = Client.CONNECTING

    # First protocol line from the server must be an INFO message.
    connection_completed = self._io_reader.readline()
    info_line = yield from asyncio.wait_for(connection_completed, self.options["connect_timeout"])
    if INFO_OP not in info_line:
        raise NatsError("nats: empty response from server when expecting INFO message")
    _, info = info_line.split(INFO_OP + _SPC_, 1)

    try:
        srv_info = json.loads(info.decode())
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt and
        # SystemExit; a narrower ValueError would be safer here.
        raise NatsError("nats: info message, json parse error")
    self._process_info(srv_info)
    self._server_info = srv_info
    if 'max_payload' in self._server_info:
        self._max_payload = self._server_info["max_payload"]

    if 'tls_required' in self._server_info and self._server_info['tls_required']:
        # Upgrade the existing socket to TLS in place.
        ssl_context = None
        if "tls" in self.options:
            ssl_context = self.options.get('tls')
        elif self._current_server.uri.scheme == 'tls':
            ssl_context = ssl.create_default_context()
        else:
            raise NatsError('nats: no ssl context provided')

        transport = self._io_writer.transport
        sock = transport.get_extra_info('socket')
        if not sock:
            # This shouldn't happen
            raise NatsError('nats: unable to get socket')

        yield from self._io_writer.drain()  # just in case something is left

        self._io_reader, self._io_writer = \
            yield from asyncio.open_connection(
                loop=self._loop,
                limit=DEFAULT_BUFFER_SIZE,
                sock=sock,
                ssl=ssl_context,
                server_hostname=self._current_server.uri.hostname,
            )

    # Refresh state of parser upon reconnect.
    if self.is_reconnecting:
        self._ps.reset()

    # Send CONNECT followed by a PING; a PONG confirms the handshake.
    connect_cmd = self._connect_command()
    self._io_writer.write(connect_cmd)
    self._io_writer.write(PING_PROTO)
    yield from self._io_writer.drain()

    # FIXME: Add readline timeout
    next_op = yield from self._io_reader.readline()
    if self.options["verbose"] and OK_OP in next_op:
        # In verbose mode the server acknowledges CONNECT with +OK first.
        next_op = yield from self._io_reader.readline()

    if ERR_OP in next_op:
        err_line = next_op.decode()
        _, err_msg = err_line.split(" ", 1)
        # FIXME: Maybe handling could be more special here,
        # checking for ErrAuthorization for example.
        # yield from self._process_err(err_msg)
        raise NatsError("nats: " + err_msg.rstrip('\r\n'))

    if PONG_PROTO in next_op:
        self._status = Client.CONNECTED

    # Start the background machinery: reader, ping interval and flusher.
    self._reading_task = self._loop.create_task(self._read_loop())
    self._pongs = []
    self._pings_outstanding = 0
    self._ping_interval_task = self._loop.create_task(
        self._ping_interval())

    # Task for kicking the flusher queue
    self._flusher_task = self._loop.create_task(self._flusher())
python
{ "resource": "" }
q270192
Client._flusher
test
def _flusher(self):
    """
    Coroutine which continuously tries to consume pending commands
    and then flushes them to the socket.
    """
    while True:
        if not self.is_connected or self.is_connecting:
            break

        try:
            # Block until some other coroutine signals pending data.
            yield from self._flush_queue.get()

            if self._pending_data_size > 0:
                # Write a snapshot of the pending buffers, then clear them.
                self._io_writer.writelines(self._pending[:])
                self._pending = []
                self._pending_data_size = 0
                yield from self._io_writer.drain()
        except OSError as e:
            # Socket error: report it and hand off to error handling.
            if self._error_cb is not None:
                yield from self._error_cb(e)
            yield from self._process_op_err(e)
            break
        except asyncio.CancelledError:
            break
python
{ "resource": "" }
q270193
Client._read_loop
test
def _read_loop(self):
    """
    Coroutine which gathers bytes sent by the server
    and feeds them to the protocol parser.
    In case of error while reading, it will stop running
    and its task has to be rescheduled.
    """
    while True:
        try:
            should_bail = self.is_closed or self.is_reconnecting
            if should_bail or self._io_reader is None:
                break
            if self.is_connected and self._io_reader.at_eof():
                # EOF while connected means the server went away.
                if self._error_cb is not None:
                    yield from self._error_cb(ErrStaleConnection)
                yield from self._process_op_err(ErrStaleConnection)
                break

            b = yield from self._io_reader.read(DEFAULT_BUFFER_SIZE)
            # Feed raw bytes to the protocol parser, which dispatches
            # MSG/PING/PONG/ERR handling.
            yield from self._ps.parse(b)
        except ErrProtocol:
            yield from self._process_op_err(ErrProtocol)
            break
        except OSError as e:
            yield from self._process_op_err(e)
            break
        except asyncio.CancelledError:
            break
python
{ "resource": "" }
q270194
coactivation
test
def coactivation(dataset, seed, threshold=0.0, output_dir='.', prefix='', r=6):
    """ Compute and save coactivation map given input image as seed.

    This is essentially just a wrapper for a meta-analysis defined
    by the contrast between those studies that activate within the seed
    and those that don't.

    Args:
        dataset: a Dataset instance containing study and activation data.
        seed: either a Nifti or Analyze image defining the boundaries of
            the seed, or a list of triples (x/y/z) defining the seed(s).
            Note that voxels do not need to be contiguous to define a
            seed--all supra-threshold voxels will be lumped together.
        threshold: optional float indicating the threshold above which
            voxels are considered to be part of the seed ROI (default = 0)
        output_dir: output directory to write to. Defaults to current.
        prefix: optional string to prepend to all coactivation images.
        r: optional integer indicating radius (in mm) of spheres to grow
            (only used if seed is a list of coordinates).

    Output:
        A set of meta-analysis images identical to that generated by
        meta.MetaAnalysis.
    """
    # A filename seed is treated as a mask image; anything else is a list
    # of x/y/z coordinates to grow spheres of radius r around.
    if isinstance(seed, string_types):
        study_ids = dataset.get_studies(mask=seed,
                                        activation_threshold=threshold)
    else:
        study_ids = dataset.get_studies(peaks=seed, r=r,
                                        activation_threshold=threshold)

    analysis = meta.MetaAnalysis(dataset, study_ids)
    analysis.save_results(output_dir, prefix)
python
{ "resource": "" }
q270195
Decoder.decode
test
def decode(self, images, save=None, round=4, names=None, **kwargs):
    """ Decodes a set of images.

    Args:
        images: The images to decode. Can be:
            - A single String specifying the filename of the image to decode
            - A list of filenames
            - A single NumPy array containing the image data
        save: Optional filename to save results to. If None (default),
            returns all results as an array.
        round: Optional integer indicating number of decimals to round
            result to. Defaults to 4. (Note: shadows the built-in round()
            inside this method; kept for API compatibility.)
        names: Optional list of names corresponding to the images in
            filenames. If passed, must be of same length and in same order
            as filenames. By default, the columns in the output will be
            named using the image filenames.

    Returns:
        A pandas DataFrame with features in rows and images in columns.
        The meaning of the values depends on the decoding method used.
    """
    # Normalize input: a single filename becomes a one-element list;
    # filename lists are loaded through the masker, arrays pass through.
    if isinstance(images, string_types):
        images = [images]
    if isinstance(images, list):
        imgs_to_decode = imageutils.load_imgs(images, self.masker)
    else:
        imgs_to_decode = images

    # Dispatch to the decoding method configured on this instance.
    methods = {
        'pearson': self._pearson_correlation,
        'dot': self._dot_product,
        'roi': self._roi_association
    }
    result = np.around(
        methods[self.method](imgs_to_decode, **kwargs), round)

    # Choose column names: synthesized for raw arrays and ROI clusters,
    # otherwise the input filenames.
    if names is None:
        if type(images).__module__ == np.__name__:
            names = ['image_%d' % i for i in range(images.shape[1])]
        elif self.method == 'roi':
            names = ['cluster_%d' % i for i in range(result.shape[1])]
        else:
            names = images

    result = pd.DataFrame(result, columns=names,
                          index=self.feature_names)

    if save is not None:
        result.to_csv(save, index_label='Feature')

    return result
python
{ "resource": "" }
q270196
Decoder._load_features_from_array
test
def _load_features_from_array(self, features):
    """ Load feature data from a 2D ndarray on disk.

    Args:
        features: Filename of a NumPy file containing a 2D array with
            voxels in rows and features in columns.
    """
    self.feature_images = np.load(features)
    # Materialize the names as a list so feature_names is a concrete
    # sequence (Python 3's range is lazy), matching the list produced by
    # the filename-based loading path.
    self.feature_names = list(range(self.feature_images.shape[1]))
python
{ "resource": "" }
q270197
Decoder._load_features_from_images
test
def _load_features_from_images(self, images, names=None):
    """ Load feature image data from image files.

    Args:
        images: A list of image filenames.
        names: An optional list of strings to use as the feature names.
            Must be in the same order as the images.

    Raises:
        ValueError: If names is provided but its length does not match
            the number of images.
    """
    if names is not None and len(names) != len(images):
        # ValueError is more specific than a bare Exception and remains
        # backward compatible with callers catching Exception.
        raise ValueError(
            "Lists of feature names and images must be of same length!")
    self.feature_names = names if names is not None else images
    self.feature_images = imageutils.load_imgs(images, self.masker)
python
{ "resource": "" }
q270198
Decoder._pearson_correlation
test
def _pearson_correlation(self, imgs_to_decode):
    """ Decode images using Pearson's r.

    Computes the correlation between each input image and each feature
    image across voxels.

    Args:
        imgs_to_decode: An ndarray of images to decode, with voxels in
            rows and images in columns.

    Returns:
        An n_features x n_images 2D array, with each cell representing
        the pearson correlation between the i'th feature and the j'th
        image across all voxels.
    """
    # Cast both operands to float before correlating.
    imgs = imgs_to_decode.astype(float)
    feats = self.feature_images.astype(float)
    return self._xy_corr(imgs, feats)
python
{ "resource": "" }
q270199
Decoder._dot_product
test
def _dot_product(self, imgs_to_decode):
    """ Decoding using the dot product.

    Each output cell is the inner product over voxels between a feature
    image and an input image.
    """
    transposed_imgs = imgs_to_decode.T
    return np.dot(transposed_imgs, self.feature_images).T
python
{ "resource": "" }