content stringlengths 1 103k ⌀ | path stringlengths 8 216 | filename stringlengths 2 179 | language stringclasses 15
values | size_bytes int64 2 189k | quality_score float64 0.5 0.95 | complexity float64 0 1 | documentation_ratio float64 0 1 | repository stringclasses 5
values | stars int64 0 1k | created_date stringdate 2023-07-10 19:21:08 2025-07-09 19:11:45 | license stringclasses 4
values | is_test bool 2
classes | file_hash stringlengths 32 32 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
"""A Jupyter console app to run files."""\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nfrom __future__ import annotations\n\nimport queue\nimport signal\nimport sys\nimport time\nimport typing as t\n\nfrom jupyter_core.application import JupyterApp, base_aliases, base_flags\nfrom traitlets import Any, Dict, Float\nfrom traitlets.config import catch_config_error\n\nfrom . import __version__\nfrom .consoleapp import JupyterConsoleApp, app_aliases, app_flags\n\nOUTPUT_TIMEOUT = 10\n\n# copy flags from mixin:\nflags = dict(base_flags)\n# start with mixin frontend flags:\nfrontend_flags_dict = dict(app_flags)\n# update full dict with frontend flags:\nflags.update(frontend_flags_dict)\n\n# copy flags from mixin\naliases = dict(base_aliases)\n# start with mixin frontend flags\nfrontend_aliases_dict = dict(app_aliases)\n# load updated frontend flags into full dict\naliases.update(frontend_aliases_dict)\n\n# get flags&aliases into sets, and remove a couple that\n# shouldn't be scrubbed from backend flags:\nfrontend_aliases = set(frontend_aliases_dict.keys())\nfrontend_flags = set(frontend_flags_dict.keys())\n\n\nclass RunApp(JupyterApp, JupyterConsoleApp): # type:ignore[misc]\n """An Jupyter Console app to run files."""\n\n version = __version__\n name = "jupyter run"\n description = """Run Jupyter kernel code."""\n flags = Dict(flags) # type:ignore[assignment]\n aliases = Dict(aliases) # type:ignore[assignment]\n frontend_aliases = Any(frontend_aliases)\n frontend_flags = Any(frontend_flags)\n kernel_timeout = Float(\n 60,\n config=True,\n help="""Timeout for giving up on a kernel (in seconds).\n\n On first connect and restart, the console tests whether the\n kernel is running and responsive by sending kernel_info_requests.\n This sets the timeout in seconds for how long the kernel can take\n before being presumed dead.\n """,\n )\n\n def parse_command_line(self, argv: list[str] | None = None) -> None:\n """Parse 
the command line arguments."""\n super().parse_command_line(argv)\n self.build_kernel_argv(self.extra_args)\n self.filenames_to_run = self.extra_args[:]\n\n @catch_config_error\n def initialize(self, argv: list[str] | None = None) -> None: # type:ignore[override]\n """Initialize the app."""\n self.log.debug("jupyter run: initialize...")\n super().initialize(argv)\n JupyterConsoleApp.initialize(self)\n signal.signal(signal.SIGINT, self.handle_sigint)\n self.init_kernel_info()\n\n def handle_sigint(self, *args: t.Any) -> None:\n """Handle SIGINT."""\n if self.kernel_manager:\n self.kernel_manager.interrupt_kernel()\n else:\n self.log.error("Cannot interrupt kernels we didn't start.\n")\n\n def init_kernel_info(self) -> None:\n """Wait for a kernel to be ready, and store kernel info"""\n timeout = self.kernel_timeout\n tic = time.time()\n self.kernel_client.hb_channel.unpause()\n msg_id = self.kernel_client.kernel_info()\n while True:\n try:\n reply = self.kernel_client.get_shell_msg(timeout=1)\n except queue.Empty as e:\n if (time.time() - tic) > timeout:\n msg = "Kernel didn't respond to kernel_info_request"\n raise RuntimeError(msg) from e\n else:\n if reply["parent_header"].get("msg_id") == msg_id:\n self.kernel_info = reply["content"]\n return\n\n def start(self) -> None:\n """Start the application."""\n self.log.debug("jupyter run: starting...")\n super().start()\n if self.filenames_to_run:\n for filename in self.filenames_to_run:\n self.log.debug("jupyter run: executing `%s`", filename)\n with open(filename) as fp:\n code = fp.read()\n reply = self.kernel_client.execute_interactive(code, timeout=OUTPUT_TIMEOUT)\n return_code = 0 if reply["content"]["status"] == "ok" else 1\n if return_code:\n raise Exception("jupyter-run error running '%s'" % filename)\n else:\n code = sys.stdin.read()\n reply = self.kernel_client.execute_interactive(code, timeout=OUTPUT_TIMEOUT)\n return_code = 0 if reply["content"]["status"] == "ok" else 1\n if return_code:\n msg = 
"jupyter-run error running 'stdin'"\n raise Exception(msg)\n\n\nmain = launch_new_instance = RunApp.launch_instance\n\nif __name__ == "__main__":\n main()\n | .venv\Lib\site-packages\jupyter_client\runapp.py | runapp.py | Python | 4,684 | 0.95 | 0.164063 | 0.091743 | vue-tools | 298 | 2024-11-27T07:55:57.993038 | Apache-2.0 | false | 604612c3a102c80cf3611c021d113cfd |
"""Session object for building, serializing, sending, and receiving messages.\n\nThe Session object supports serialization, HMAC signatures,\nand metadata on messages.\n\nAlso defined here are utilities for working with Sessions:\n* A SessionFactory to be used as a base class for configurables that work with\nSessions.\n* A Message object for convenience that allows attribute-access to the msg dict.\n"""\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nfrom __future__ import annotations\n\nimport hashlib\nimport hmac\nimport json\nimport logging\nimport os\nimport pickle\nimport pprint\nimport random\nimport typing as t\nimport warnings\nfrom binascii import b2a_hex\nfrom datetime import datetime, timezone\nfrom hmac import compare_digest\n\n# We are using compare_digest to limit the surface of timing attacks\nimport zmq.asyncio\nfrom tornado.ioloop import IOLoop\nfrom traitlets import (\n Any,\n Bool,\n CBytes,\n CUnicode,\n Dict,\n DottedObjectName,\n Instance,\n Integer,\n Set,\n TraitError,\n Unicode,\n observe,\n)\nfrom traitlets.config.configurable import Configurable, LoggingConfigurable\nfrom traitlets.log import get_logger\nfrom traitlets.utils.importstring import import_item\nfrom zmq.eventloop.zmqstream import ZMQStream\n\nfrom ._version import protocol_version\nfrom .adapter import adapt\nfrom .jsonutil import extract_dates, json_clean, json_default, squash_dates\n\nPICKLE_PROTOCOL = pickle.DEFAULT_PROTOCOL\n\nutc = timezone.utc\n\n# -----------------------------------------------------------------------------\n# utility functions\n# -----------------------------------------------------------------------------\n\n\ndef squash_unicode(obj: t.Any) -> t.Any:\n """coerce unicode back to bytestrings."""\n if isinstance(obj, dict):\n for key in list(obj.keys()):\n obj[key] = squash_unicode(obj[key])\n if isinstance(key, str):\n obj[squash_unicode(key)] = obj.pop(key)\n elif isinstance(obj, list):\n for i, 
v in enumerate(obj):\n obj[i] = squash_unicode(v)\n elif isinstance(obj, str):\n obj = obj.encode("utf8")\n return obj\n\n\n# -----------------------------------------------------------------------------\n# globals and defaults\n# -----------------------------------------------------------------------------\n\n# default values for the thresholds:\nMAX_ITEMS = 64\nMAX_BYTES = 1024\n\n# ISO8601-ify datetime objects\n# allow unicode\n# disallow nan, because it's not actually valid JSON\n\n\ndef json_packer(obj: t.Any) -> bytes:\n """Convert a json object to a bytes."""\n try:\n return json.dumps(\n obj,\n default=json_default,\n ensure_ascii=False,\n allow_nan=False,\n ).encode("utf8", errors="surrogateescape")\n except (TypeError, ValueError) as e:\n # Fallback to trying to clean the json before serializing\n packed = json.dumps(\n json_clean(obj),\n default=json_default,\n ensure_ascii=False,\n allow_nan=False,\n ).encode("utf8", errors="surrogateescape")\n\n warnings.warn(\n f"Message serialization failed with:\n{e}\n"\n "Supporting this message is deprecated in jupyter-client 7, please make "\n "sure your message is JSON-compliant",\n stacklevel=2,\n )\n\n return packed\n\n\ndef json_unpacker(s: str | bytes) -> t.Any:\n """Convert a json bytes or string to an object."""\n if isinstance(s, bytes):\n s = s.decode("utf8", "replace")\n return json.loads(s)\n\n\ndef pickle_packer(o: t.Any) -> bytes:\n """Pack an object using the pickle module."""\n return pickle.dumps(squash_dates(o), PICKLE_PROTOCOL)\n\n\npickle_unpacker = pickle.loads\n\ndefault_packer = json_packer\ndefault_unpacker = json_unpacker\n\nDELIM = b"<IDS|MSG>"\n# singleton dummy tracker, which will always report as done\nDONE = zmq.MessageTracker()\n\n# -----------------------------------------------------------------------------\n# Mixin tools for apps that use Sessions\n# -----------------------------------------------------------------------------\n\n\ndef new_id() -> str:\n """Generate a new random 
id.\n\n Avoids problematic runtime import in stdlib uuid on Python 2.\n\n Returns\n -------\n\n id string (16 random bytes as hex-encoded text, chunks separated by '-')\n """\n buf = os.urandom(16)\n return "-".join(b2a_hex(x).decode("ascii") for x in (buf[:4], buf[4:]))\n\n\ndef new_id_bytes() -> bytes:\n """Return new_id as ascii bytes"""\n return new_id().encode("ascii")\n\n\nsession_aliases = {\n "ident": "Session.session",\n "user": "Session.username",\n "keyfile": "Session.keyfile",\n}\n\nsession_flags = {\n "secure": (\n {"Session": {"key": new_id_bytes(), "keyfile": ""}},\n """Use HMAC digests for authentication of messages.\n Setting this flag will generate a new UUID to use as the HMAC key.\n """,\n ),\n "no-secure": (\n {"Session": {"key": b"", "keyfile": ""}},\n """Don't authenticate messages.""",\n ),\n}\n\n\ndef default_secure(cfg: t.Any) -> None: # pragma: no cover\n """Set the default behavior for a config environment to be secure.\n\n If Session.key/keyfile have not been set, set Session.key to\n a new random UUID.\n """\n warnings.warn("default_secure is deprecated", DeprecationWarning, stacklevel=2)\n if "Session" in cfg and ("key" in cfg.Session or "keyfile" in cfg.Session):\n return\n # key/keyfile not specified, generate new UUID:\n cfg.Session.key = new_id_bytes()\n\n\ndef utcnow() -> datetime:\n """Return timezone-aware UTC timestamp"""\n return datetime.now(utc)\n\n\n# -----------------------------------------------------------------------------\n# Classes\n# -----------------------------------------------------------------------------\n\n\nclass SessionFactory(LoggingConfigurable):\n """The Base class for configurables that have a Session, Context, logger,\n and IOLoop.\n """\n\n logname = Unicode("")\n\n @observe("logname")\n def _logname_changed(self, change: t.Any) -> None:\n self.log = logging.getLogger(change["new"])\n\n # not configurable:\n context = Instance("zmq.Context")\n\n def _context_default(self) -> zmq.Context:\n return 
zmq.Context()\n\n session = Instance("jupyter_client.session.Session", allow_none=True)\n\n loop = Instance("tornado.ioloop.IOLoop")\n\n def _loop_default(self) -> IOLoop:\n return IOLoop.current()\n\n def __init__(self, **kwargs: t.Any) -> None:\n """Initialize a session factory."""\n super().__init__(**kwargs)\n\n if self.session is None:\n # construct the session\n self.session = Session(**kwargs)\n\n\nclass Message:\n """A simple message object that maps dict keys to attributes.\n\n A Message can be created from a dict and a dict from a Message instance\n simply by calling dict(msg_obj)."""\n\n def __init__(self, msg_dict: dict[str, t.Any]) -> None:\n """Initialize a message."""\n dct = self.__dict__\n for k, v in dict(msg_dict).items():\n if isinstance(v, dict):\n v = Message(v) # noqa\n dct[k] = v\n\n # Having this iterator lets dict(msg_obj) work out of the box.\n def __iter__(self) -> t.ItemsView[str, t.Any]:\n return iter(self.__dict__.items()) # type:ignore[return-value]\n\n def __repr__(self) -> str:\n return repr(self.__dict__)\n\n def __str__(self) -> str:\n return pprint.pformat(self.__dict__)\n\n def __contains__(self, k: object) -> bool:\n return k in self.__dict__\n\n def __getitem__(self, k: str) -> t.Any:\n return self.__dict__[k]\n\n\ndef msg_header(\n msg_id: str, msg_type: str, username: str, session: Session | str\n) -> dict[str, t.Any]:\n """Create a new message header"""\n date = utcnow()\n version = protocol_version\n return locals()\n\n\ndef extract_header(msg_or_header: dict[str, t.Any]) -> dict[str, t.Any]:\n """Given a message or header, return the header."""\n if not msg_or_header:\n return {}\n try:\n # See if msg_or_header is the entire message.\n h = msg_or_header["header"]\n except KeyError:\n try:\n # See if msg_or_header is just the header\n h = msg_or_header["msg_id"]\n except KeyError:\n raise\n else:\n h = msg_or_header\n if not isinstance(h, dict):\n h = dict(h)\n return h\n\n\nclass Session(Configurable):\n """Object for 
handling serialization and sending of messages.\n\n The Session object handles building messages and sending them\n with ZMQ sockets or ZMQStream objects. Objects can communicate with each\n other over the network via Session objects, and only need to work with the\n dict-based IPython message spec. The Session will handle\n serialization/deserialization, security, and metadata.\n\n Sessions support configurable serialization via packer/unpacker traits,\n and signing with HMAC digests via the key/keyfile traits.\n\n Parameters\n ----------\n\n debug : bool\n whether to trigger extra debugging statements\n packer/unpacker : str : 'json', 'pickle' or import_string\n importstrings for methods to serialize message parts. If just\n 'json' or 'pickle', predefined JSON and pickle packers will be used.\n Otherwise, the entire importstring must be used.\n\n The functions must accept at least valid JSON input, and output *bytes*.\n\n For example, to use msgpack:\n packer = 'msgpack.packb', unpacker='msgpack.unpackb'\n pack/unpack : callables\n You can also set the pack/unpack callables for serialization directly.\n session : bytes\n the ID of this Session object. The default is to generate a new UUID.\n username : unicode\n username added to message headers. The default is to ask the OS.\n key : bytes\n The key used to initialize an HMAC signature. If unset, messages\n will not be signed or checked.\n keyfile : filepath\n The file containing a key. 
If this is set, `key` will be initialized\n to the contents of the file.\n\n """\n\n debug = Bool(False, config=True, help="""Debug output in the Session""")\n\n check_pid = Bool(\n True,\n config=True,\n help="""Whether to check PID to protect against calls after fork.\n\n This check can be disabled if fork-safety is handled elsewhere.\n """,\n )\n\n packer = DottedObjectName(\n "json",\n config=True,\n help="""The name of the packer for serializing messages.\n Should be one of 'json', 'pickle', or an import name\n for a custom callable serializer.""",\n )\n\n @observe("packer")\n def _packer_changed(self, change: t.Any) -> None:\n new = change["new"]\n if new.lower() == "json":\n self.pack = json_packer\n self.unpack = json_unpacker\n self.unpacker = new\n elif new.lower() == "pickle":\n self.pack = pickle_packer\n self.unpack = pickle_unpacker\n self.unpacker = new\n else:\n self.pack = import_item(str(new))\n\n unpacker = DottedObjectName(\n "json",\n config=True,\n help="""The name of the unpacker for unserializing messages.\n Only used with custom functions for `packer`.""",\n )\n\n @observe("unpacker")\n def _unpacker_changed(self, change: t.Any) -> None:\n new = change["new"]\n if new.lower() == "json":\n self.pack = json_packer\n self.unpack = json_unpacker\n self.packer = new\n elif new.lower() == "pickle":\n self.pack = pickle_packer\n self.unpack = pickle_unpacker\n self.packer = new\n else:\n self.unpack = import_item(str(new))\n\n session = CUnicode("", config=True, help="""The UUID identifying this session.""")\n\n def _session_default(self) -> str:\n u = new_id()\n self.bsession = u.encode("ascii")\n return u\n\n @observe("session")\n def _session_changed(self, change: t.Any) -> None:\n self.bsession = self.session.encode("ascii")\n\n # bsession is the session as bytes\n bsession = CBytes(b"")\n\n username = Unicode(\n os.environ.get("USER", "username"),\n help="""Username for the Session. 
Default is your system username.""",\n config=True,\n )\n\n metadata = Dict(\n {},\n config=True,\n help="Metadata dictionary, which serves as the default top-level metadata dict for each "\n "message.",\n )\n\n # if 0, no adapting to do.\n adapt_version = Integer(0)\n\n # message signature related traits:\n\n key = CBytes(config=True, help="""execution key, for signing messages.""")\n\n def _key_default(self) -> bytes:\n return new_id_bytes()\n\n @observe("key")\n def _key_changed(self, change: t.Any) -> None:\n self._new_auth()\n\n signature_scheme = Unicode(\n "hmac-sha256",\n config=True,\n help="""The digest scheme used to construct the message signatures.\n Must have the form 'hmac-HASH'.""",\n )\n\n @observe("signature_scheme")\n def _signature_scheme_changed(self, change: t.Any) -> None:\n new = change["new"]\n if not new.startswith("hmac-"):\n raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)\n hash_name = new.split("-", 1)[1]\n try:\n self.digest_mod = getattr(hashlib, hash_name)\n except AttributeError as e:\n raise TraitError("hashlib has no such attribute: %s" % hash_name) from e\n self._new_auth()\n\n digest_mod = Any()\n\n def _digest_mod_default(self) -> t.Callable:\n return hashlib.sha256\n\n auth = Instance(hmac.HMAC, allow_none=True)\n\n def _new_auth(self) -> None:\n if self.key:\n self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)\n else:\n self.auth = None\n\n digest_history = Set()\n digest_history_size = Integer(\n 2**16,\n config=True,\n help="""The maximum number of digests to remember.\n\n The digest history will be culled when it exceeds this value.\n """,\n )\n\n keyfile = Unicode("", config=True, help="""path to file containing execution key.""")\n\n @observe("keyfile")\n def _keyfile_changed(self, change: t.Any) -> None:\n with open(change["new"], "rb") as f:\n self.key = f.read().strip()\n\n # for protecting against sends from forks\n pid = Integer()\n\n # serialization traits:\n\n pack = 
Any(default_packer) # the actual packer function\n\n @observe("pack")\n def _pack_changed(self, change: t.Any) -> None:\n new = change["new"]\n if not callable(new):\n raise TypeError("packer must be callable, not %s" % type(new))\n\n unpack = Any(default_unpacker) # the actual packer function\n\n @observe("unpack")\n def _unpack_changed(self, change: t.Any) -> None:\n # unpacker is not checked - it is assumed to be\n new = change["new"]\n if not callable(new):\n raise TypeError("unpacker must be callable, not %s" % type(new))\n\n # thresholds:\n copy_threshold = Integer(\n 2**16,\n config=True,\n help="Threshold (in bytes) beyond which a buffer should be sent without copying.",\n )\n buffer_threshold = Integer(\n MAX_BYTES,\n config=True,\n help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid "\n "pickling.",\n )\n item_threshold = Integer(\n MAX_ITEMS,\n config=True,\n help="""The maximum number of items for a container to be introspected for custom serialization.\n Containers larger than this are pickled outright.\n """,\n )\n\n def __init__(self, **kwargs: t.Any) -> None:\n """create a Session object\n\n Parameters\n ----------\n\n debug : bool\n whether to trigger extra debugging statements\n packer/unpacker : str : 'json', 'pickle' or import_string\n importstrings for methods to serialize message parts. If just\n 'json' or 'pickle', predefined JSON and pickle packers will be used.\n Otherwise, the entire importstring must be used.\n\n The functions must accept at least valid JSON input, and output\n *bytes*.\n\n For example, to use msgpack:\n packer = 'msgpack.packb', unpacker='msgpack.unpackb'\n pack/unpack : callables\n You can also set the pack/unpack callables for serialization\n directly.\n session : unicode (must be ascii)\n the ID of this Session object. The default is to generate a new\n UUID.\n bsession : bytes\n The session as bytes\n username : unicode\n username added to message headers. 
The default is to ask the OS.\n key : bytes\n The key used to initialize an HMAC signature. If unset, messages\n will not be signed or checked.\n signature_scheme : str\n The message digest scheme. Currently must be of the form 'hmac-HASH',\n where 'HASH' is a hashing function available in Python's hashlib.\n The default is 'hmac-sha256'.\n This is ignored if 'key' is empty.\n keyfile : filepath\n The file containing a key. If this is set, `key` will be\n initialized to the contents of the file.\n """\n super().__init__(**kwargs)\n self._check_packers()\n self.none = self.pack({})\n # ensure self._session_default() if necessary, so bsession is defined:\n self.session # noqa\n self.pid = os.getpid()\n self._new_auth()\n if not self.key:\n get_logger().warning(\n "Message signing is disabled. This is insecure and not recommended!"\n )\n\n def clone(self) -> Session:\n """Create a copy of this Session\n\n Useful when connecting multiple times to a given kernel.\n This prevents a shared digest_history warning about duplicate digests\n due to multiple connections to IOPub in the same process.\n\n .. 
versionadded:: 5.1\n """\n # make a copy\n new_session = type(self)()\n for name in self.traits():\n setattr(new_session, name, getattr(self, name))\n # fork digest_history\n new_session.digest_history = set()\n new_session.digest_history.update(self.digest_history)\n return new_session\n\n message_count = 0\n\n @property\n def msg_id(self) -> str:\n message_number = self.message_count\n self.message_count += 1\n return f"{self.session}_{os.getpid()}_{message_number}"\n\n def _check_packers(self) -> None:\n """check packers for datetime support."""\n pack = self.pack\n unpack = self.unpack\n\n # check simple serialization\n msg_list = {"a": [1, "hi"]}\n try:\n packed = pack(msg_list)\n except Exception as e:\n msg = f"packer '{self.packer}' could not serialize a simple message: {e}"\n raise ValueError(msg) from e\n\n # ensure packed message is bytes\n if not isinstance(packed, bytes):\n raise ValueError("message packed to %r, but bytes are required" % type(packed))\n\n # check that unpack is pack's inverse\n try:\n unpacked = unpack(packed)\n assert unpacked == msg_list\n except Exception as e:\n msg = (\n f"unpacker '{self.unpacker}' could not handle output from packer"\n f" '{self.packer}': {e}"\n )\n raise ValueError(msg) from e\n\n # check datetime support\n msg_datetime = {"t": utcnow()}\n try:\n unpacked = unpack(pack(msg_datetime))\n if isinstance(unpacked["t"], datetime):\n msg = "Shouldn't deserialize to datetime"\n raise ValueError(msg)\n except Exception:\n self.pack = lambda o: pack(squash_dates(o))\n self.unpack = lambda s: unpack(s)\n\n def msg_header(self, msg_type: str) -> dict[str, t.Any]:\n """Create a header for a message type."""\n return msg_header(self.msg_id, msg_type, self.username, self.session)\n\n def msg(\n self,\n msg_type: str,\n content: dict | None = None,\n parent: dict[str, t.Any] | None = None,\n header: dict[str, t.Any] | None = None,\n metadata: dict[str, t.Any] | None = None,\n ) -> dict[str, t.Any]:\n """Return the nested 
message dict.\n\n This format is different from what is sent over the wire. The\n serialize/deserialize methods converts this nested message dict to the wire\n format, which is a list of message parts.\n """\n msg = {}\n header = self.msg_header(msg_type) if header is None else header\n msg["header"] = header\n msg["msg_id"] = header["msg_id"]\n msg["msg_type"] = header["msg_type"]\n msg["parent_header"] = {} if parent is None else extract_header(parent)\n msg["content"] = {} if content is None else content\n msg["metadata"] = self.metadata.copy()\n if metadata is not None:\n msg["metadata"].update(metadata)\n return msg\n\n def sign(self, msg_list: list) -> bytes:\n """Sign a message with HMAC digest. If no auth, return b''.\n\n Parameters\n ----------\n msg_list : list\n The [p_header,p_parent,p_content] part of the message list.\n """\n if self.auth is None:\n return b""\n h = self.auth.copy()\n for m in msg_list:\n h.update(m)\n return h.hexdigest().encode()\n\n def serialize(\n self,\n msg: dict[str, t.Any],\n ident: list[bytes] | bytes | None = None,\n ) -> list[bytes]:\n """Serialize the message components to bytes.\n\n This is roughly the inverse of deserialize. 
The serialize/deserialize\n methods work with full message lists, whereas pack/unpack work with\n the individual message parts in the message list.\n\n Parameters\n ----------\n msg : dict or Message\n The next message dict as returned by the self.msg method.\n\n Returns\n -------\n msg_list : list\n The list of bytes objects to be sent with the format::\n\n [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,\n p_metadata, p_content, buffer1, buffer2, ...]\n\n In this list, the ``p_*`` entities are the packed or serialized\n versions, so if JSON is used, these are utf8 encoded JSON strings.\n """\n content = msg.get("content", {})\n if content is None:\n content = self.none\n elif isinstance(content, dict):\n content = self.pack(content)\n elif isinstance(content, bytes):\n # content is already packed, as in a relayed message\n pass\n elif isinstance(content, str):\n # should be bytes, but JSON often spits out unicode\n content = content.encode("utf8")\n else:\n raise TypeError("Content incorrect type: %s" % type(content))\n\n real_message = [\n self.pack(msg["header"]),\n self.pack(msg["parent_header"]),\n self.pack(msg["metadata"]),\n content,\n ]\n\n to_send = []\n\n if isinstance(ident, list):\n # accept list of idents\n to_send.extend(ident)\n elif ident is not None:\n to_send.append(ident)\n to_send.append(DELIM)\n\n signature = self.sign(real_message)\n to_send.append(signature)\n\n to_send.extend(real_message)\n\n return to_send\n\n def send(\n self,\n stream: zmq.sugar.socket.Socket | ZMQStream | None,\n msg_or_type: dict[str, t.Any] | str,\n content: dict[str, t.Any] | None = None,\n parent: dict[str, t.Any] | None = None,\n ident: bytes | list[bytes] | None = None,\n buffers: list[bytes] | None = None,\n track: bool = False,\n header: dict[str, t.Any] | None = None,\n metadata: dict[str, t.Any] | None = None,\n ) -> dict[str, t.Any] | None:\n """Build and send a message via stream or socket.\n\n The message format used by this function internally is 
as follows:\n\n [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,\n buffer1,buffer2,...]\n\n The serialize/deserialize methods convert the nested message dict into this\n format.\n\n Parameters\n ----------\n\n stream : zmq.Socket or ZMQStream\n The socket-like object used to send the data.\n msg_or_type : str or Message/dict\n Normally, msg_or_type will be a msg_type unless a message is being\n sent more than once. If a header is supplied, this can be set to\n None and the msg_type will be pulled from the header.\n\n content : dict or None\n The content of the message (ignored if msg_or_type is a message).\n header : dict or None\n The header dict for the message (ignored if msg_to_type is a message).\n parent : Message or dict or None\n The parent or parent header describing the parent of this message\n (ignored if msg_or_type is a message).\n ident : bytes or list of bytes\n The zmq.IDENTITY routing path.\n metadata : dict or None\n The metadata describing the message\n buffers : list or None\n The already-serialized buffers to be appended to the message.\n track : bool\n Whether to track. 
Only for use with Sockets, because ZMQStream\n objects cannot track messages.\n\n\n Returns\n -------\n msg : dict\n The constructed message.\n """\n if not isinstance(stream, zmq.Socket):\n # ZMQStreams and dummy sockets do not support tracking.\n track = False\n\n if isinstance(stream, zmq.asyncio.Socket):\n assert stream is not None # type:ignore[unreachable]\n stream = zmq.Socket.shadow(stream.underlying)\n\n if isinstance(msg_or_type, (Message, dict)):\n # We got a Message or message dict, not a msg_type so don't\n # build a new Message.\n msg = msg_or_type\n buffers = buffers or msg.get("buffers", [])\n else:\n msg = self.msg(\n msg_or_type,\n content=content,\n parent=parent,\n header=header,\n metadata=metadata,\n )\n if self.check_pid and os.getpid() != self.pid:\n get_logger().warning("WARNING: attempted to send message from fork\n%s", msg)\n return None\n buffers = [] if buffers is None else buffers\n for idx, buf in enumerate(buffers):\n if isinstance(buf, memoryview):\n view = buf\n else:\n try:\n # check to see if buf supports the buffer protocol.\n view = memoryview(buf)\n except TypeError as e:\n emsg = "Buffer objects must support the buffer protocol."\n raise TypeError(emsg) from e\n # memoryview.contiguous is new in 3.3,\n # just skip the check on Python 2\n if hasattr(view, "contiguous") and not view.contiguous:\n # zmq requires memoryviews to be contiguous\n raise ValueError("Buffer %i (%r) is not contiguous" % (idx, buf))\n\n if self.adapt_version:\n msg = adapt(msg, self.adapt_version)\n to_send = self.serialize(msg, ident)\n to_send.extend(buffers)\n longest = max([len(s) for s in to_send])\n copy = longest < self.copy_threshold\n\n if stream and buffers and track and not copy:\n # only really track when we are doing zero-copy buffers\n tracker = stream.send_multipart(to_send, copy=False, track=True)\n elif stream:\n # use dummy tracker, which will be done immediately\n tracker = DONE\n stream.send_multipart(to_send, copy=copy)\n else:\n 
tracker = DONE\n\n if self.debug:\n pprint.pprint(msg) # noqa\n pprint.pprint(to_send) # noqa\n pprint.pprint(buffers) # noqa\n\n msg["tracker"] = tracker\n\n return msg\n\n def send_raw(\n self,\n stream: zmq.sugar.socket.Socket,\n msg_list: list,\n flags: int = 0,\n copy: bool = True,\n ident: bytes | list[bytes] | None = None,\n ) -> None:\n """Send a raw message via ident path.\n\n This method is used to send a already serialized message.\n\n Parameters\n ----------\n stream : ZMQStream or Socket\n The ZMQ stream or socket to use for sending the message.\n msg_list : list\n The serialized list of messages to send. This only includes the\n [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of\n the message.\n ident : ident or list\n A single ident or a list of idents to use in sending.\n """\n to_send = []\n if isinstance(ident, bytes):\n ident = [ident]\n if ident is not None:\n to_send.extend(ident)\n\n to_send.append(DELIM)\n # Don't include buffers in signature (per spec).\n to_send.append(self.sign(msg_list[0:4]))\n to_send.extend(msg_list)\n if isinstance(stream, zmq.asyncio.Socket):\n stream = zmq.Socket.shadow(stream.underlying)\n stream.send_multipart(to_send, flags, copy=copy)\n\n def recv(\n self,\n socket: zmq.sugar.socket.Socket,\n mode: int = zmq.NOBLOCK,\n content: bool = True,\n copy: bool = True,\n ) -> tuple[list[bytes] | None, dict[str, t.Any] | None]:\n """Receive and unpack a message.\n\n Parameters\n ----------\n socket : ZMQStream or Socket\n The socket or stream to use in receiving.\n\n Returns\n -------\n [idents], msg\n [idents] is a list of idents and msg is a nested message dict of\n same format as self.msg returns.\n """\n if isinstance(socket, ZMQStream): # type:ignore[unreachable]\n socket = socket.socket # type:ignore[unreachable]\n if isinstance(socket, zmq.asyncio.Socket):\n socket = zmq.Socket.shadow(socket.underlying)\n\n try:\n msg_list = socket.recv_multipart(mode, copy=copy)\n except zmq.ZMQError as e:\n 
if e.errno == zmq.EAGAIN:\n # We can convert EAGAIN to None as we know in this case\n # recv_multipart won't return None.\n return None, None\n else:\n raise\n # split multipart message into identity list and message dict\n # invalid large messages can cause very expensive string comparisons\n idents, msg_list = self.feed_identities(msg_list, copy)\n try:\n return idents, self.deserialize(msg_list, content=content, copy=copy)\n except Exception as e:\n # TODO: handle it\n raise e\n\n def feed_identities(\n self, msg_list: list[bytes] | list[zmq.Message], copy: bool = True\n ) -> tuple[list[bytes], list[bytes] | list[zmq.Message]]:\n """Split the identities from the rest of the message.\n\n Feed until DELIM is reached, then return the prefix as idents and\n remainder as msg_list. This is easily broken by setting an IDENT to DELIM,\n but that would be silly.\n\n Parameters\n ----------\n msg_list : a list of Message or bytes objects\n The message to be split.\n copy : bool\n flag determining whether the arguments are bytes or Messages\n\n Returns\n -------\n (idents, msg_list) : two lists\n idents will always be a list of bytes, each of which is a ZMQ\n identity. msg_list will be a list of bytes or zmq.Messages of the\n form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] 
and\n should be unpackable/unserializable via self.deserialize at this\n point.\n """\n if copy:\n msg_list = t.cast(t.List[bytes], msg_list)\n idx = msg_list.index(DELIM)\n return msg_list[:idx], msg_list[idx + 1 :]\n else:\n msg_list = t.cast(t.List[zmq.Message], msg_list)\n failed = True\n for idx, m in enumerate(msg_list): # noqa\n if m.bytes == DELIM:\n failed = False\n break\n if failed:\n msg = "DELIM not in msg_list"\n raise ValueError(msg)\n idents, msg_list = msg_list[:idx], msg_list[idx + 1 :]\n return [bytes(m.bytes) for m in idents], msg_list\n\n def _add_digest(self, signature: bytes) -> None:\n """add a digest to history to protect against replay attacks"""\n if self.digest_history_size == 0:\n # no history, never add digests\n return\n\n self.digest_history.add(signature)\n if len(self.digest_history) > self.digest_history_size:\n # threshold reached, cull 10%\n self._cull_digest_history()\n\n def _cull_digest_history(self) -> None:\n """cull the digest history\n\n Removes a randomly selected 10% of the digest history\n """\n current = len(self.digest_history)\n n_to_cull = max(int(current // 10), current - self.digest_history_size)\n if n_to_cull >= current:\n self.digest_history = set()\n return\n to_cull = random.sample(tuple(sorted(self.digest_history)), n_to_cull)\n self.digest_history.difference_update(to_cull)\n\n def deserialize(\n self,\n msg_list: list[bytes] | list[zmq.Message],\n content: bool = True,\n copy: bool = True,\n ) -> dict[str, t.Any]:\n """Unserialize a msg_list to a nested message dict.\n\n This is roughly the inverse of serialize. 
The serialize/deserialize\n methods work with full message lists, whereas pack/unpack work with\n the individual message parts in the message list.\n\n Parameters\n ----------\n msg_list : list of bytes or Message objects\n The list of message parts of the form [HMAC,p_header,p_parent,\n p_metadata,p_content,buffer1,buffer2,...].\n content : bool (True)\n Whether to unpack the content dict (True), or leave it packed\n (False).\n copy : bool (True)\n Whether msg_list contains bytes (True) or the non-copying Message\n objects in each place (False).\n\n Returns\n -------\n msg : dict\n The nested message dict with top-level keys [header, parent_header,\n content, buffers]. The buffers are returned as memoryviews.\n """\n minlen = 5\n message = {}\n if not copy:\n # pyzmq didn't copy the first parts of the message, so we'll do it\n msg_list = t.cast(t.List[zmq.Message], msg_list)\n msg_list_beginning = [bytes(msg.bytes) for msg in msg_list[:minlen]]\n msg_list = t.cast(t.List[bytes], msg_list)\n msg_list = msg_list_beginning + msg_list[minlen:]\n msg_list = t.cast(t.List[bytes], msg_list)\n if self.auth is not None:\n signature = msg_list[0]\n if not signature:\n msg = "Unsigned Message"\n raise ValueError(msg)\n if signature in self.digest_history:\n raise ValueError("Duplicate Signature: %r" % signature)\n if content:\n # Only store signature if we are unpacking content, don't store if just peeking.\n self._add_digest(signature)\n check = self.sign(msg_list[1:5])\n if not compare_digest(signature, check):\n msg = "Invalid Signature: %r" % signature\n raise ValueError(msg)\n if not len(msg_list) >= minlen:\n msg = "malformed message, must have at least %i elements" % minlen\n raise TypeError(msg)\n header = self.unpack(msg_list[1])\n message["header"] = extract_dates(header)\n message["msg_id"] = header["msg_id"]\n message["msg_type"] = header["msg_type"]\n message["parent_header"] = extract_dates(self.unpack(msg_list[2]))\n message["metadata"] = 
self.unpack(msg_list[3])\n if content:\n message["content"] = self.unpack(msg_list[4])\n else:\n message["content"] = msg_list[4]\n buffers = [memoryview(b) for b in msg_list[5:]]\n if buffers and buffers[0].shape is None:\n # force copy to workaround pyzmq #646\n msg_list = t.cast(t.List[zmq.Message], msg_list)\n buffers = [memoryview(bytes(b.bytes)) for b in msg_list[5:]]\n message["buffers"] = buffers\n if self.debug:\n pprint.pprint(message) # noqa\n # adapt to the current version\n return adapt(message)\n\n def unserialize(self, *args: t.Any, **kwargs: t.Any) -> dict[str, t.Any]:\n """**DEPRECATED** Use deserialize instead."""\n # pragma: no cover\n warnings.warn(\n "Session.unserialize is deprecated. Use Session.deserialize.",\n DeprecationWarning,\n stacklevel=2,\n )\n return self.deserialize(*args, **kwargs)\n | .venv\Lib\site-packages\jupyter_client\session.py | session.py | Python | 37,774 | 0.95 | 0.160795 | 0.074514 | awesome-app | 314 | 2025-05-11T01:48:12.823915 | BSD-3-Clause | false | 2aa0d79992f421c514f252e4055a9171 |
""" Defines a KernelClient that provides thread-safe sockets with async callbacks on message\nreplies.\n"""\nimport asyncio\nimport atexit\nimport time\nfrom concurrent.futures import Future\nfrom functools import partial\nfrom threading import Thread\nfrom typing import Any, Dict, List, Optional\n\nimport zmq\nfrom tornado.ioloop import IOLoop\nfrom traitlets import Instance, Type\nfrom traitlets.log import get_logger\nfrom zmq.eventloop import zmqstream\n\nfrom .channels import HBChannel\nfrom .client import KernelClient\nfrom .session import Session\n\n# Local imports\n# import ZMQError in top-level namespace, to avoid ugly attribute-error messages\n# during garbage collection of threads at exit\n\n\nclass ThreadedZMQSocketChannel:\n """A ZMQ socket invoking a callback in the ioloop"""\n\n session = None\n socket = None\n ioloop = None\n stream = None\n _inspect = None\n\n def __init__(\n self,\n socket: Optional[zmq.Socket],\n session: Optional[Session],\n loop: Optional[IOLoop],\n ) -> None:\n """Create a channel.\n\n Parameters\n ----------\n socket : :class:`zmq.Socket`\n The ZMQ socket to use.\n session : :class:`session.Session`\n The session to use.\n loop\n A tornado ioloop to connect the socket to using a ZMQStream\n """\n super().__init__()\n\n self.socket = socket\n self.session = session\n self.ioloop = loop\n f: Future = Future()\n\n def setup_stream() -> None:\n try:\n assert self.socket is not None\n self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)\n self.stream.on_recv(self._handle_recv)\n except Exception as e:\n f.set_exception(e)\n else:\n f.set_result(None)\n\n assert self.ioloop is not None\n self.ioloop.add_callback(setup_stream)\n # don't wait forever, raise any errors\n f.result(timeout=10)\n\n _is_alive = False\n\n def is_alive(self) -> bool:\n """Whether the channel is alive."""\n return self._is_alive\n\n def start(self) -> None:\n """Start the channel."""\n self._is_alive = True\n\n def stop(self) -> None:\n """Stop the 
channel."""\n self._is_alive = False\n\n def close(self) -> None:\n """Close the channel."""\n if self.stream is not None and self.ioloop is not None:\n # c.f.Future for threadsafe results\n f: Future = Future()\n\n def close_stream() -> None:\n try:\n if self.stream is not None:\n self.stream.close(linger=0)\n self.stream = None\n except Exception as e:\n f.set_exception(e)\n else:\n f.set_result(None)\n\n self.ioloop.add_callback(close_stream)\n # wait for result\n try:\n f.result(timeout=5)\n except Exception as e:\n log = get_logger()\n msg = f"Error closing stream {self.stream}: {e}"\n log.warning(msg, RuntimeWarning, stacklevel=2)\n\n if self.socket is not None:\n try:\n self.socket.close(linger=0)\n except Exception:\n pass\n self.socket = None\n\n def send(self, msg: Dict[str, Any]) -> None:\n """Queue a message to be sent from the IOLoop's thread.\n\n Parameters\n ----------\n msg : message to send\n\n This is threadsafe, as it uses IOLoop.add_callback to give the loop's\n thread control of the action.\n """\n\n def thread_send() -> None:\n assert self.session is not None\n self.session.send(self.stream, msg)\n\n assert self.ioloop is not None\n self.ioloop.add_callback(thread_send)\n\n def _handle_recv(self, msg_list: List) -> None:\n """Callback for stream.on_recv.\n\n Unpacks message, and calls handlers with it.\n """\n assert self.ioloop is not None\n assert self.session is not None\n ident, smsg = self.session.feed_identities(msg_list)\n msg = self.session.deserialize(smsg)\n # let client inspect messages\n if self._inspect:\n self._inspect(msg) # type:ignore[unreachable]\n self.call_handlers(msg)\n\n def call_handlers(self, msg: Dict[str, Any]) -> None:\n """This method is called in the ioloop thread when a message arrives.\n\n Subclasses should override this method to handle incoming messages.\n It is important to remember that this method is called in the thread\n so that some logic must be done to ensure that the application level\n handlers are 
called in the application thread.\n """\n pass\n\n def process_events(self) -> None:\n """Subclasses should override this with a method\n processing any pending GUI events.\n """\n pass\n\n def flush(self, timeout: float = 1.0) -> None:\n """Immediately processes all pending messages on this channel.\n\n This is only used for the IOPub channel.\n\n Callers should use this method to ensure that :meth:`call_handlers`\n has been called for all messages that have been received on the\n 0MQ SUB socket of this channel.\n\n This method is thread safe.\n\n Parameters\n ----------\n timeout : float, optional\n The maximum amount of time to spend flushing, in seconds. The\n default is one second.\n """\n # We do the IOLoop callback process twice to ensure that the IOLoop\n # gets to perform at least one full poll.\n stop_time = time.monotonic() + timeout\n assert self.ioloop is not None\n if self.stream is None or self.stream.closed():\n # don't bother scheduling flush on a thread if we're closed\n _msg = "Attempt to flush closed stream"\n raise OSError(_msg)\n\n def flush(f: Any) -> None:\n try:\n self._flush()\n except Exception as e:\n f.set_exception(e)\n else:\n f.set_result(None)\n\n for _ in range(2):\n f: Future = Future()\n self.ioloop.add_callback(partial(flush, f))\n # wait for async flush, re-raise any errors\n timeout = max(stop_time - time.monotonic(), 0)\n try:\n f.result(max(stop_time - time.monotonic(), 0))\n except TimeoutError:\n # flush with a timeout means stop waiting, not raise\n return\n\n def _flush(self) -> None:\n """Callback for :method:`self.flush`."""\n assert self.stream is not None\n self.stream.flush()\n self._flushed = True\n\n\nclass IOLoopThread(Thread):\n """Run a pyzmq ioloop in a thread to send and receive messages"""\n\n _exiting = False\n ioloop = None\n\n def __init__(self) -> None:\n """Initialize an io loop thread."""\n super().__init__()\n self.daemon = True\n\n @staticmethod\n @atexit.register\n def _notice_exit() -> None:\n # 
Class definitions can be torn down during interpreter shutdown.\n # We only need to set _exiting flag if this hasn't happened.\n if IOLoopThread is not None:\n IOLoopThread._exiting = True\n\n def start(self) -> None:\n """Start the IOLoop thread\n\n Don't return until self.ioloop is defined,\n which is created in the thread\n """\n self._start_future: Future = Future()\n Thread.start(self)\n # wait for start, re-raise any errors\n self._start_future.result(timeout=10)\n\n def run(self) -> None:\n """Run my loop, ignoring EINTR events in the poller"""\n try:\n loop = asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n\n async def assign_ioloop() -> None:\n self.ioloop = IOLoop.current()\n\n loop.run_until_complete(assign_ioloop())\n except Exception as e:\n self._start_future.set_exception(e)\n else:\n self._start_future.set_result(None)\n\n loop.run_until_complete(self._async_run())\n\n async def _async_run(self) -> None:\n """Run forever (until self._exiting is set)"""\n while not self._exiting:\n await asyncio.sleep(1)\n\n def stop(self) -> None:\n """Stop the channel's event loop and join its thread.\n\n This calls :meth:`~threading.Thread.join` and returns when the thread\n terminates. 
:class:`RuntimeError` will be raised if\n :meth:`~threading.Thread.start` is called again.\n """\n self._exiting = True\n self.join()\n self.close()\n self.ioloop = None\n\n def __del__(self) -> None:\n self.close()\n\n def close(self) -> None:\n """Close the io loop thread."""\n if self.ioloop is not None:\n try:\n self.ioloop.close(all_fds=True)\n except Exception:\n pass\n\n\nclass ThreadedKernelClient(KernelClient):\n """A KernelClient that provides thread-safe sockets with async callbacks on message replies."""\n\n @property\n def ioloop(self) -> Optional[IOLoop]: # type:ignore[override]\n if self.ioloop_thread:\n return self.ioloop_thread.ioloop\n return None\n\n ioloop_thread = Instance(IOLoopThread, allow_none=True)\n\n def start_channels(\n self,\n shell: bool = True,\n iopub: bool = True,\n stdin: bool = True,\n hb: bool = True,\n control: bool = True,\n ) -> None:\n """Start the channels on the client."""\n self.ioloop_thread = IOLoopThread()\n self.ioloop_thread.start()\n\n if shell:\n self.shell_channel._inspect = self._check_kernel_info_reply\n\n super().start_channels(shell, iopub, stdin, hb, control)\n\n def _check_kernel_info_reply(self, msg: Dict[str, Any]) -> None:\n """This is run in the ioloop thread when the kernel info reply is received"""\n if msg["msg_type"] == "kernel_info_reply":\n self._handle_kernel_info_reply(msg)\n self.shell_channel._inspect = None\n\n def stop_channels(self) -> None:\n """Stop the channels on the client."""\n super().stop_channels()\n if self.ioloop_thread and self.ioloop_thread.is_alive():\n self.ioloop_thread.stop()\n\n iopub_channel_class = Type(ThreadedZMQSocketChannel) # type:ignore[arg-type]\n shell_channel_class = Type(ThreadedZMQSocketChannel) # type:ignore[arg-type]\n stdin_channel_class = Type(ThreadedZMQSocketChannel) # type:ignore[arg-type]\n hb_channel_class = Type(HBChannel) # type:ignore[arg-type]\n control_channel_class = Type(ThreadedZMQSocketChannel) # type:ignore[arg-type]\n\n def is_alive(self) 
-> bool:\n """Is the kernel process still running?"""\n if self._hb_channel is not None:\n # We don't have access to the KernelManager,\n # so we use the heartbeat.\n return self._hb_channel.is_beating()\n # no heartbeat and not local, we can't tell if it's running,\n # so naively return True\n return True\n | .venv\Lib\site-packages\jupyter_client\threaded.py | threaded.py | Python | 11,283 | 0.95 | 0.196581 | 0.066202 | node-utils | 3 | 2025-01-27T14:09:22.143631 | BSD-3-Clause | false | c44783a53a726d17e4a54da7168d8ba7 |
"""\nutils:\n- provides utility wrappers to run asynchronous functions in a blocking environment.\n- vendor functions from ipython_genutils that should be retired at some point.\n"""\nfrom __future__ import annotations\n\nimport os\nfrom typing import Sequence\n\nfrom jupyter_core.utils import ensure_async, run_sync # noqa: F401 # noqa: F401\n\nfrom .session import utcnow # noqa\n\n\ndef _filefind(filename: str, path_dirs: str | Sequence[str] | None = None) -> str:\n """Find a file by looking through a sequence of paths.\n\n This iterates through a sequence of paths looking for a file and returns\n the full, absolute path of the first occurrence of the file. If no set of\n path dirs is given, the filename is tested as is, after running through\n :func:`expandvars` and :func:`expanduser`. Thus a simple call::\n\n filefind('myfile.txt')\n\n will find the file in the current working dir, but::\n\n filefind('~/myfile.txt')\n\n Will find the file in the users home directory. This function does not\n automatically try any paths, such as the cwd or the user's home directory.\n\n Parameters\n ----------\n filename : str\n The filename to look for.\n path_dirs : str, None or sequence of str\n The sequence of paths to look for the file in. If None, the filename\n need to be absolute or be in the cwd. If a string, the string is\n put into a sequence and the searched. 
If a sequence, walk through\n each element and join with ``filename``, calling :func:`expandvars`\n and :func:`expanduser` before testing for existence.\n\n Returns\n -------\n Raises :exc:`IOError` or returns absolute path to file.\n """\n\n # If paths are quoted, abspath gets confused, strip them...\n filename = filename.strip('"').strip("'")\n # If the input is an absolute path, just check it exists\n if os.path.isabs(filename) and os.path.isfile(filename):\n return filename\n\n if path_dirs is None:\n path_dirs = ("",)\n elif isinstance(path_dirs, str):\n path_dirs = (path_dirs,)\n\n for path in path_dirs:\n if path == ".":\n path = os.getcwd() # noqa\n testname = _expand_path(os.path.join(path, filename))\n if os.path.isfile(testname):\n return os.path.abspath(testname)\n msg = f"File {filename!r} does not exist in any of the search paths: {path_dirs!r}"\n raise OSError(msg)\n\n\ndef _expand_path(s: str) -> str:\n """Expand $VARS and ~names in a string, like a shell\n\n :Examples:\n\n In [2]: os.environ['FOO']='test'\n\n In [3]: expand_path('variable FOO is $FOO')\n Out[3]: 'variable FOO is test'\n """\n # This is a pretty subtle hack. When expand user is given a UNC path\n # on Windows (\\server\share$\%username%), os.path.expandvars, removes\n # the $ to get (\\server\share\%username%). I think it considered $\n # alone an empty var. But, we need the $ to remains there (it indicates\n # a hidden share).\n if os.name == "nt":\n s = s.replace("$\\", "IPYTHON_TEMP")\n s = os.path.expandvars(os.path.expanduser(s))\n if os.name == "nt":\n s = s.replace("IPYTHON_TEMP", "$\\")\n return s\n | .venv\Lib\site-packages\jupyter_client\utils.py | utils.py | Python | 3,178 | 0.95 | 0.166667 | 0.1 | node-utils | 421 | 2024-01-10T10:15:50.772369 | BSD-3-Clause | false | 5e113acc3c4f7bc6507c606488d672a1 |
"""Use a Windows event to interrupt a child process like SIGINT.\n\nThe child needs to explicitly listen for this - see\nipykernel.parentpoller.ParentPollerWindows for a Python implementation.\n"""\nimport ctypes\nfrom typing import Any\n\n\ndef create_interrupt_event() -> Any:\n """Create an interrupt event handle.\n\n The parent process should call this to create the\n interrupt event that is passed to the child process. It should store\n this handle and use it with ``send_interrupt`` to interrupt the child\n process.\n """\n\n # Create a security attributes struct that permits inheritance of the\n # handle by new processes.\n # FIXME: We can clean up this mess by requiring pywin32 for IPython.\n class SECURITY_ATTRIBUTES(ctypes.Structure): # noqa\n _fields_ = [\n ("nLength", ctypes.c_int),\n ("lpSecurityDescriptor", ctypes.c_void_p),\n ("bInheritHandle", ctypes.c_int),\n ]\n\n sa = SECURITY_ATTRIBUTES()\n sa_p = ctypes.pointer(sa)\n sa.nLength = ctypes.sizeof(SECURITY_ATTRIBUTES)\n sa.lpSecurityDescriptor = 0\n sa.bInheritHandle = 1\n\n return ctypes.windll.kernel32.CreateEventA( # type:ignore[attr-defined]\n sa_p,\n False,\n False,\n "", # lpEventAttributes # bManualReset # bInitialState\n ) # lpName\n\n\ndef send_interrupt(interrupt_handle: Any) -> None:\n """Sends an interrupt event using the specified handle."""\n ctypes.windll.kernel32.SetEvent(interrupt_handle) # type:ignore[attr-defined]\n | .venv\Lib\site-packages\jupyter_client\win_interrupt.py | win_interrupt.py | Python | 1,516 | 0.95 | 0.133333 | 0.083333 | python-kit | 717 | 2024-12-26T18:46:27.329736 | Apache-2.0 | false | 3f7edf05debbb72ecdaa931a8d9f4fda |
"""The version information for jupyter client."""\nimport re\nfrom typing import List, Union\n\n__version__ = "8.6.3"\n\n# Build up version_info tuple for backwards compatibility\npattern = r"(?P<major>\d+).(?P<minor>\d+).(?P<patch>\d+)(?P<rest>.*)"\nmatch = re.match(pattern, __version__)\nif match:\n parts: List[Union[int, str]] = [int(match[part]) for part in ["major", "minor", "patch"]]\n if match["rest"]:\n parts.append(match["rest"])\nelse:\n parts = []\nversion_info = tuple(parts)\n\n\nprotocol_version_info = (5, 3)\nprotocol_version = "%i.%i" % protocol_version_info\n | .venv\Lib\site-packages\jupyter_client\_version.py | _version.py | Python | 577 | 0.95 | 0.25 | 0.0625 | python-kit | 42 | 2023-10-31T09:06:29.173016 | GPL-3.0 | false | 954eb2c030cf859744effbf697f7aa2d |
"""Client-side implementations of the Jupyter protocol"""\nfrom ._version import __version__, protocol_version, protocol_version_info, version_info\nfrom .asynchronous import AsyncKernelClient\nfrom .blocking import BlockingKernelClient\nfrom .client import KernelClient\nfrom .connect import * # noqa\nfrom .launcher import * # noqa\nfrom .manager import AsyncKernelManager, KernelManager, run_kernel\nfrom .multikernelmanager import AsyncMultiKernelManager, MultiKernelManager\nfrom .provisioning import KernelProvisionerBase, LocalProvisioner\n | .venv\Lib\site-packages\jupyter_client\__init__.py | __init__.py | Python | 539 | 0.95 | 0 | 0 | vue-tools | 913 | 2024-01-04T22:40:17.220337 | Apache-2.0 | false | c06df17f0c15ac0bb37f7fb5629603cf |
"""Implements an async kernel client"""\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nfrom __future__ import annotations\n\nimport typing as t\n\nimport zmq.asyncio\nfrom traitlets import Instance, Type\n\nfrom ..channels import AsyncZMQSocketChannel, HBChannel\nfrom ..client import KernelClient, reqrep\n\n\ndef wrapped(meth: t.Callable, channel: str) -> t.Callable:\n """Wrap a method on a channel and handle replies."""\n\n def _(self: AsyncKernelClient, *args: t.Any, **kwargs: t.Any) -> t.Any:\n reply = kwargs.pop("reply", False)\n timeout = kwargs.pop("timeout", None)\n msg_id = meth(self, *args, **kwargs)\n if not reply:\n return msg_id\n return self._recv_reply(msg_id, timeout=timeout, channel=channel)\n\n return _\n\n\nclass AsyncKernelClient(KernelClient):\n """A KernelClient with async APIs\n\n ``get_[channel]_msg()`` methods wait for and return messages on channels,\n raising :exc:`queue.Empty` if no message arrives within ``timeout`` seconds.\n """\n\n context = Instance(zmq.asyncio.Context) # type:ignore[arg-type]\n\n def _context_default(self) -> zmq.asyncio.Context:\n self._created_context = True\n return zmq.asyncio.Context()\n\n # --------------------------------------------------------------------------\n # Channel proxy methods\n # --------------------------------------------------------------------------\n\n get_shell_msg = KernelClient._async_get_shell_msg\n get_iopub_msg = KernelClient._async_get_iopub_msg\n get_stdin_msg = KernelClient._async_get_stdin_msg\n get_control_msg = KernelClient._async_get_control_msg\n\n wait_for_ready = KernelClient._async_wait_for_ready\n\n # The classes to use for the various channels\n shell_channel_class = Type(AsyncZMQSocketChannel) # type:ignore[arg-type]\n iopub_channel_class = Type(AsyncZMQSocketChannel) # type:ignore[arg-type]\n stdin_channel_class = Type(AsyncZMQSocketChannel) # type:ignore[arg-type]\n hb_channel_class = Type(HBChannel) # 
type:ignore[arg-type]\n control_channel_class = Type(AsyncZMQSocketChannel) # type:ignore[arg-type]\n\n _recv_reply = KernelClient._async_recv_reply\n\n # replies come on the shell channel\n execute = reqrep(wrapped, KernelClient.execute)\n history = reqrep(wrapped, KernelClient.history)\n complete = reqrep(wrapped, KernelClient.complete)\n is_complete = reqrep(wrapped, KernelClient.is_complete)\n inspect = reqrep(wrapped, KernelClient.inspect)\n kernel_info = reqrep(wrapped, KernelClient.kernel_info)\n comm_info = reqrep(wrapped, KernelClient.comm_info)\n\n is_alive = KernelClient._async_is_alive\n execute_interactive = KernelClient._async_execute_interactive\n\n # replies come on the control channel\n shutdown = reqrep(wrapped, KernelClient.shutdown, channel="control")\n | .venv\Lib\site-packages\jupyter_client\asynchronous\client.py | client.py | Python | 2,870 | 0.95 | 0.106667 | 0.145455 | awesome-app | 897 | 2023-09-19T10:23:43.058444 | MIT | false | 01ec28257052c2917e3f7c13ebeff4cf |
from .client import AsyncKernelClient # noqa\n | .venv\Lib\site-packages\jupyter_client\asynchronous\__init__.py | __init__.py | Python | 46 | 0.75 | 0 | 0 | vue-tools | 497 | 2024-01-12T10:24:23.924447 | Apache-2.0 | false | 89826217d981926d913b18cf5a678e0d |
\n\n | .venv\Lib\site-packages\jupyter_client\asynchronous\__pycache__\client.cpython-313.pyc | client.cpython-313.pyc | Other | 3,533 | 0.8 | 0.083333 | 0 | awesome-app | 771 | 2024-03-29T00:42:25.586653 | Apache-2.0 | false | 31cda43ee6d2441448240262999284e8 |
\n\n | .venv\Lib\site-packages\jupyter_client\asynchronous\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 253 | 0.7 | 0 | 0 | vue-tools | 201 | 2024-07-24T04:52:28.665889 | Apache-2.0 | false | 4b63a9b4841e0ee90d4b99e44f197e71 |
"""Implements a fully blocking kernel client.\n\nUseful for test suites and blocking terminal interfaces.\n"""\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nfrom __future__ import annotations\n\nimport typing as t\n\nfrom traitlets import Type\n\nfrom ..channels import HBChannel, ZMQSocketChannel\nfrom ..client import KernelClient, reqrep\nfrom ..utils import run_sync\n\n\ndef wrapped(meth: t.Callable, channel: str) -> t.Callable:\n """Wrap a method on a channel and handle replies."""\n\n def _(self: BlockingKernelClient, *args: t.Any, **kwargs: t.Any) -> t.Any:\n reply = kwargs.pop("reply", False)\n timeout = kwargs.pop("timeout", None)\n msg_id = meth(self, *args, **kwargs)\n if not reply:\n return msg_id\n return self._recv_reply(msg_id, timeout=timeout, channel=channel)\n\n return _\n\n\nclass BlockingKernelClient(KernelClient):\n """A KernelClient with blocking APIs\n\n ``get_[channel]_msg()`` methods wait for and return messages on channels,\n raising :exc:`queue.Empty` if no message arrives within ``timeout`` seconds.\n """\n\n # --------------------------------------------------------------------------\n # Channel proxy methods\n # --------------------------------------------------------------------------\n\n get_shell_msg = run_sync(KernelClient._async_get_shell_msg)\n get_iopub_msg = run_sync(KernelClient._async_get_iopub_msg)\n get_stdin_msg = run_sync(KernelClient._async_get_stdin_msg)\n get_control_msg = run_sync(KernelClient._async_get_control_msg)\n\n wait_for_ready = run_sync(KernelClient._async_wait_for_ready)\n\n # The classes to use for the various channels\n shell_channel_class = Type(ZMQSocketChannel) # type:ignore[arg-type]\n iopub_channel_class = Type(ZMQSocketChannel) # type:ignore[arg-type]\n stdin_channel_class = Type(ZMQSocketChannel) # type:ignore[arg-type]\n hb_channel_class = Type(HBChannel) # type:ignore[arg-type]\n control_channel_class = Type(ZMQSocketChannel) # 
type:ignore[arg-type]\n\n _recv_reply = run_sync(KernelClient._async_recv_reply)\n\n # replies come on the shell channel\n execute = reqrep(wrapped, KernelClient.execute)\n history = reqrep(wrapped, KernelClient.history)\n complete = reqrep(wrapped, KernelClient.complete)\n inspect = reqrep(wrapped, KernelClient.inspect)\n kernel_info = reqrep(wrapped, KernelClient.kernel_info)\n comm_info = reqrep(wrapped, KernelClient.comm_info)\n\n is_alive = run_sync(KernelClient._async_is_alive)\n execute_interactive = run_sync(KernelClient._async_execute_interactive)\n\n # replies come on the control channel\n shutdown = reqrep(wrapped, KernelClient.shutdown, channel="control")\n | .venv\Lib\site-packages\jupyter_client\blocking\client.py | client.py | Python | 2,742 | 0.95 | 0.112676 | 0.153846 | node-utils | 862 | 2025-02-26T14:45:57.145859 | GPL-3.0 | false | 2f177750f0de905b1204842450b3bbad |
from .client import BlockingKernelClient # noqa\n | .venv\Lib\site-packages\jupyter_client\blocking\__init__.py | __init__.py | Python | 49 | 0.75 | 0 | 0 | react-lib | 457 | 2024-12-05T12:09:31.954650 | GPL-3.0 | false | c45635ad083c54f9ed788e23cd1c6311 |
\n\n | .venv\Lib\site-packages\jupyter_client\blocking\__pycache__\client.cpython-313.pyc | client.cpython-313.pyc | Other | 3,242 | 0.8 | 0.107143 | 0 | python-kit | 559 | 2023-12-17T07:52:24.061615 | BSD-3-Clause | false | 19d969ca472314e96b86d5940576aff7 |
\n\n | .venv\Lib\site-packages\jupyter_client\blocking\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 252 | 0.7 | 0 | 0 | react-lib | 239 | 2024-02-07T08:09:54.560596 | Apache-2.0 | false | 8c0a4d72a48fc7636a9760cc04d35656 |
"""A kernel manager with a tornado IOLoop"""\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nimport typing as t\n\nimport zmq\nfrom tornado import ioloop\nfrom traitlets import Instance, Type\nfrom zmq.eventloop.zmqstream import ZMQStream\n\nfrom ..manager import AsyncKernelManager, KernelManager\nfrom .restarter import AsyncIOLoopKernelRestarter, IOLoopKernelRestarter\n\n\ndef as_zmqstream(f: t.Any) -> t.Callable:\n """Convert a socket to a zmq stream."""\n\n def wrapped(self: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:\n save_socket_class = None\n # zmqstreams only support sync sockets\n if self.context._socket_class is not zmq.Socket:\n save_socket_class = self.context._socket_class\n self.context._socket_class = zmq.Socket\n try:\n socket = f(self, *args, **kwargs)\n finally:\n if save_socket_class:\n # restore default socket class\n self.context._socket_class = save_socket_class\n return ZMQStream(socket, self.loop)\n\n return wrapped\n\n\nclass IOLoopKernelManager(KernelManager):\n """An io loop kernel manager."""\n\n loop = Instance("tornado.ioloop.IOLoop")\n\n def _loop_default(self) -> ioloop.IOLoop:\n return ioloop.IOLoop.current()\n\n restarter_class = Type(\n default_value=IOLoopKernelRestarter,\n klass=IOLoopKernelRestarter,\n help=(\n "Type of KernelRestarter to use. 
"\n "Must be a subclass of IOLoopKernelRestarter.\n"\n "Override this to customize how kernel restarts are managed."\n ),\n config=True,\n )\n _restarter: t.Any = Instance("jupyter_client.ioloop.IOLoopKernelRestarter", allow_none=True)\n\n def start_restarter(self) -> None:\n """Start the restarter."""\n if self.autorestart and self.has_kernel:\n if self._restarter is None:\n self._restarter = self.restarter_class(\n kernel_manager=self, loop=self.loop, parent=self, log=self.log\n )\n self._restarter.start()\n\n def stop_restarter(self) -> None:\n """Stop the restarter."""\n if self.autorestart and self._restarter is not None:\n self._restarter.stop()\n\n connect_shell = as_zmqstream(KernelManager.connect_shell)\n connect_control = as_zmqstream(KernelManager.connect_control)\n connect_iopub = as_zmqstream(KernelManager.connect_iopub)\n connect_stdin = as_zmqstream(KernelManager.connect_stdin)\n connect_hb = as_zmqstream(KernelManager.connect_hb)\n\n\nclass AsyncIOLoopKernelManager(AsyncKernelManager):\n """An async ioloop kernel manager."""\n\n loop = Instance("tornado.ioloop.IOLoop")\n\n def _loop_default(self) -> ioloop.IOLoop:\n return ioloop.IOLoop.current()\n\n restarter_class = Type(\n default_value=AsyncIOLoopKernelRestarter,\n klass=AsyncIOLoopKernelRestarter,\n help=(\n "Type of KernelRestarter to use. 
"\n "Must be a subclass of AsyncIOLoopKernelManager.\n"\n "Override this to customize how kernel restarts are managed."\n ),\n config=True,\n )\n _restarter: t.Any = Instance(\n "jupyter_client.ioloop.AsyncIOLoopKernelRestarter", allow_none=True\n )\n\n def start_restarter(self) -> None:\n """Start the restarter."""\n if self.autorestart and self.has_kernel:\n if self._restarter is None:\n self._restarter = self.restarter_class(\n kernel_manager=self, loop=self.loop, parent=self, log=self.log\n )\n self._restarter.start()\n\n def stop_restarter(self) -> None:\n """Stop the restarter."""\n if self.autorestart and self._restarter is not None:\n self._restarter.stop()\n\n connect_shell = as_zmqstream(AsyncKernelManager.connect_shell)\n connect_control = as_zmqstream(AsyncKernelManager.connect_control)\n connect_iopub = as_zmqstream(AsyncKernelManager.connect_iopub)\n connect_stdin = as_zmqstream(AsyncKernelManager.connect_stdin)\n connect_hb = as_zmqstream(AsyncKernelManager.connect_hb)\n | .venv\Lib\site-packages\jupyter_client\ioloop\manager.py | manager.py | Python | 4,162 | 0.95 | 0.172414 | 0.042553 | python-kit | 764 | 2024-07-18T05:04:25.098288 | BSD-3-Clause | false | fcde566a75531e058ea634d1b0daec87 |
"""A basic in process kernel monitor with autorestarting.\n\nThis watches a kernel's state using KernelManager.is_alive and auto\nrestarts the kernel if it dies.\n"""\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nimport time\nimport warnings\nfrom typing import Any\n\nfrom traitlets import Instance\n\nfrom ..restarter import KernelRestarter\n\n\nclass IOLoopKernelRestarter(KernelRestarter):\n """Monitor and autorestart a kernel."""\n\n loop = Instance("tornado.ioloop.IOLoop")\n\n def _loop_default(self) -> Any:\n warnings.warn(\n "IOLoopKernelRestarter.loop is deprecated in jupyter-client 5.2",\n DeprecationWarning,\n stacklevel=4,\n )\n from tornado import ioloop\n\n return ioloop.IOLoop.current()\n\n _pcallback = None\n\n def start(self) -> None:\n """Start the polling of the kernel."""\n if self._pcallback is None:\n from tornado.ioloop import PeriodicCallback\n\n self._pcallback = PeriodicCallback(\n self.poll,\n 1000 * self.time_to_dead,\n )\n self._pcallback.start()\n\n def stop(self) -> None:\n """Stop the kernel polling."""\n if self._pcallback is not None:\n self._pcallback.stop()\n self._pcallback = None\n\n\nclass AsyncIOLoopKernelRestarter(IOLoopKernelRestarter):\n """An async io loop kernel restarter."""\n\n async def poll(self) -> None: # type:ignore[override]\n """Poll the kernel."""\n if self.debug:\n self.log.debug("Polling kernel...")\n is_alive = await self.kernel_manager.is_alive()\n now = time.time()\n if not is_alive:\n self._last_dead = now\n if self._restarting:\n self._restart_count += 1\n else:\n self._restart_count = 1\n\n if self._restart_count > self.restart_limit:\n self.log.warning("AsyncIOLoopKernelRestarter: restart failed")\n self._fire_callbacks("dead")\n self._restarting = False\n self._restart_count = 0\n self.stop()\n else:\n newports = self.random_ports_until_alive and self._initial_startup\n self.log.info(\n "AsyncIOLoopKernelRestarter: restarting kernel (%i/%i), %s 
random ports",\n self._restart_count,\n self.restart_limit,\n "new" if newports else "keep",\n )\n self._fire_callbacks("restart")\n await self.kernel_manager.restart_kernel(now=True, newports=newports)\n self._restarting = True\n else:\n # Since `is_alive` only tests that the kernel process is alive, it does not\n # indicate that the kernel has successfully completed startup. To solve this\n # correctly, we would need to wait for a kernel info reply, but it is not\n # necessarily appropriate to start a kernel client + channels in the\n # restarter. Therefore, we use "has been alive continuously for X time" as a\n # heuristic for a stable start up.\n # See https://github.com/jupyter/jupyter_client/pull/717 for details.\n stable_start_time = self.stable_start_time\n if self.kernel_manager.provisioner:\n stable_start_time = self.kernel_manager.provisioner.get_stable_start_time(\n recommended=stable_start_time\n )\n if self._initial_startup and now - self._last_dead >= stable_start_time:\n self._initial_startup = False\n if self._restarting and now - self._last_dead >= stable_start_time:\n self.log.debug("AsyncIOLoopKernelRestarter: restart apparently succeeded")\n self._restarting = False\n | .venv\Lib\site-packages\jupyter_client\ioloop\restarter.py | restarter.py | Python | 3,906 | 0.95 | 0.205882 | 0.104651 | vue-tools | 882 | 2025-06-01T20:49:15.538257 | GPL-3.0 | false | 8f3480c6645538ff46f76e9f2bed6bb9 |
from .manager import AsyncIOLoopKernelManager # noqa\nfrom .manager import IOLoopKernelManager # noqa\nfrom .restarter import AsyncIOLoopKernelRestarter # noqa\nfrom .restarter import IOLoopKernelRestarter # noqa\n | .venv\Lib\site-packages\jupyter_client\ioloop\__init__.py | __init__.py | Python | 214 | 0.95 | 0 | 0 | vue-tools | 659 | 2024-07-21T13:37:08.099769 | BSD-3-Clause | false | dbab049115fee873b70f54d366ad4c36 |
\n\n | .venv\Lib\site-packages\jupyter_client\ioloop\__pycache__\manager.cpython-313.pyc | manager.cpython-313.pyc | Other | 5,960 | 0.95 | 0 | 0 | react-lib | 777 | 2024-09-04T10:18:44.636782 | BSD-3-Clause | false | 0a0a3d8a7b766a269e61db0222d57096 |
\n\n | .venv\Lib\site-packages\jupyter_client\ioloop\__pycache__\restarter.cpython-313.pyc | restarter.cpython-313.pyc | Other | 4,872 | 0.8 | 0.025641 | 0 | python-kit | 898 | 2023-08-27T06:36:21.547017 | Apache-2.0 | false | f886bb4473198c1b99eeea59d3d4d14b |
\n\n | .venv\Lib\site-packages\jupyter_client\ioloop\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 404 | 0.7 | 0 | 0 | react-lib | 783 | 2024-08-25T20:21:34.315884 | GPL-3.0 | false | 0705f2be479b7e6d804bcd4f3b9d36cd |
"""Kernel Provisioner Classes"""\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nimport glob\nimport sys\nfrom os import getenv, path\nfrom typing import Any, Dict, List\n\n# See compatibility note on `group` keyword in https://docs.python.org/3/library/importlib.metadata.html#entry-points\nif sys.version_info < (3, 10): # pragma: no cover\n from importlib_metadata import EntryPoint, entry_points # type:ignore[import-not-found]\nelse: # pragma: no cover\n from importlib.metadata import EntryPoint, entry_points\n\nfrom traitlets.config import SingletonConfigurable, Unicode, default\n\nfrom .provisioner_base import KernelProvisionerBase\n\n\nclass KernelProvisionerFactory(SingletonConfigurable):\n """\n :class:`KernelProvisionerFactory` is responsible for creating provisioner instances.\n\n A singleton instance, `KernelProvisionerFactory` is also used by the :class:`KernelSpecManager`\n to validate `kernel_provisioner` references found in kernel specifications to confirm their\n availability (in cases where the kernel specification references a kernel provisioner that has\n not been installed into the current Python environment).\n\n It's ``default_provisioner_name`` attribute can be used to specify the default provisioner\n to use when a kernel_spec is found to not reference a provisioner. 
It's value defaults to\n `"local-provisioner"` which identifies the local provisioner implemented by\n :class:`LocalProvisioner`.\n """\n\n GROUP_NAME = "jupyter_client.kernel_provisioners"\n provisioners: Dict[str, EntryPoint] = {}\n\n default_provisioner_name_env = "JUPYTER_DEFAULT_PROVISIONER_NAME"\n default_provisioner_name = Unicode(\n config=True,\n help="""Indicates the name of the provisioner to use when no kernel_provisioner\n entry is present in the kernelspec.""",\n )\n\n @default("default_provisioner_name")\n def _default_provisioner_name_default(self) -> str:\n """The default provisioner name."""\n return getenv(self.default_provisioner_name_env, "local-provisioner")\n\n def __init__(self, **kwargs: Any) -> None:\n """Initialize a kernel provisioner factory."""\n super().__init__(**kwargs)\n\n for ep in KernelProvisionerFactory._get_all_provisioners():\n self.provisioners[ep.name] = ep\n\n def is_provisioner_available(self, kernel_spec: Any) -> bool:\n """\n Reads the associated ``kernel_spec`` to determine the provisioner and returns whether it\n exists as an entry_point (True) or not (False). If the referenced provisioner is not\n in the current cache or cannot be loaded via entry_points, a warning message is issued\n indicating it is not available.\n """\n is_available: bool = True\n provisioner_cfg = self._get_provisioner_config(kernel_spec)\n provisioner_name = str(provisioner_cfg.get("provisioner_name"))\n if not self._check_availability(provisioner_name):\n is_available = False\n self.log.warning(\n f"Kernel '{kernel_spec.display_name}' is referencing a kernel "\n f"provisioner ('{provisioner_name}') that is not available. 
"\n f"Ensure the appropriate package has been installed and retry."\n )\n return is_available\n\n def create_provisioner_instance(\n self, kernel_id: str, kernel_spec: Any, parent: Any\n ) -> KernelProvisionerBase:\n """\n Reads the associated ``kernel_spec`` to see if it has a `kernel_provisioner` stanza.\n If one exists, it instantiates an instance. If a kernel provisioner is not\n specified in the kernel specification, a default provisioner stanza is fabricated\n and instantiated corresponding to the current value of ``default_provisioner_name`` trait.\n The instantiated instance is returned.\n\n If the provisioner is found to not exist (not registered via entry_points),\n `ModuleNotFoundError` is raised.\n """\n provisioner_cfg = self._get_provisioner_config(kernel_spec)\n provisioner_name = str(provisioner_cfg.get("provisioner_name"))\n if not self._check_availability(provisioner_name):\n msg = f"Kernel provisioner '{provisioner_name}' has not been registered."\n raise ModuleNotFoundError(msg)\n\n self.log.debug(\n f"Instantiating kernel '{kernel_spec.display_name}' with "\n f"kernel provisioner: {provisioner_name}"\n )\n provisioner_class = self.provisioners[provisioner_name].load()\n provisioner_config = provisioner_cfg.get("config")\n provisioner: KernelProvisionerBase = provisioner_class(\n kernel_id=kernel_id, kernel_spec=kernel_spec, parent=parent, **provisioner_config\n )\n return provisioner\n\n def _check_availability(self, provisioner_name: str) -> bool:\n """\n Checks that the given provisioner is available.\n\n If the given provisioner is not in the current set of loaded provisioners an attempt\n is made to fetch the named entry point and, if successful, loads it into the cache.\n\n :param provisioner_name:\n :return:\n """\n is_available = True\n if provisioner_name not in self.provisioners:\n try:\n ep = self._get_provisioner(provisioner_name)\n self.provisioners[provisioner_name] = ep # Update cache\n except Exception:\n is_available = False\n 
return is_available\n\n def _get_provisioner_config(self, kernel_spec: Any) -> Dict[str, Any]:\n """\n Return the kernel_provisioner stanza from the kernel_spec.\n\n Checks the kernel_spec's metadata dictionary for a kernel_provisioner entry.\n If found, it is returned, else one is created relative to the DEFAULT_PROVISIONER\n and returned.\n\n Parameters\n ----------\n kernel_spec : Any - this is a KernelSpec type but listed as Any to avoid circular import\n The kernel specification object from which the provisioner dictionary is derived.\n\n Returns\n -------\n dict\n The provisioner portion of the kernel_spec. If one does not exist, it will contain\n the default information. If no `config` sub-dictionary exists, an empty `config`\n dictionary will be added.\n """\n env_provisioner = kernel_spec.metadata.get("kernel_provisioner", {})\n if "provisioner_name" in env_provisioner: # If no provisioner_name, return default\n if (\n "config" not in env_provisioner\n ): # if provisioner_name, but no config stanza, add one\n env_provisioner.update({"config": {}})\n return env_provisioner # Return what we found (plus config stanza if necessary)\n return {"provisioner_name": self.default_provisioner_name, "config": {}}\n\n def get_provisioner_entries(self) -> Dict[str, str]:\n """\n Returns a dictionary of provisioner entries.\n\n The key is the provisioner name for its entry point. 
The value is the colon-separated\n string of the entry point's module name and object name.\n """\n entries = {}\n for name, ep in self.provisioners.items():\n entries[name] = ep.value\n return entries\n\n @staticmethod\n def _get_all_provisioners() -> List[EntryPoint]:\n """Wrapper around entry_points (to fetch the set of provisioners) - primarily to facilitate testing."""\n return entry_points(group=KernelProvisionerFactory.GROUP_NAME)\n\n def _get_provisioner(self, name: str) -> EntryPoint:\n """Wrapper around entry_points (to fetch a single provisioner) - primarily to facilitate testing."""\n eps = entry_points(group=KernelProvisionerFactory.GROUP_NAME, name=name)\n if eps:\n return eps[0]\n\n # Check if the entrypoint name is 'local-provisioner'. Although this should never\n # happen, we have seen cases where the previous distribution of jupyter_client has\n # remained which doesn't include kernel-provisioner entrypoints (so 'local-provisioner'\n # is deemed not found even though its definition is in THIS package). In such cases,\n # the entrypoints package uses what it first finds - which is the older distribution\n # resulting in a violation of a supposed invariant condition. To address this scenario,\n # we will log a warning message indicating this situation, then build the entrypoint\n # instance ourselves - since we have that information.\n if name == "local-provisioner":\n distros = glob.glob(f"{path.dirname(path.dirname(__file__))}-*")\n self.log.warning(\n f"Kernel Provisioning: The 'local-provisioner' is not found. This is likely "\n f"due to the presence of multiple jupyter_client distributions and a previous "\n f"distribution is being used as the source for entrypoints - which does not "\n f"include 'local-provisioner'. That distribution should be removed such that "\n f"only the version-appropriate distribution remains (version >= 7). 
Until "\n f"then, a 'local-provisioner' entrypoint will be automatically constructed "\n f"and used.\nThe candidate distribution locations are: {distros}"\n )\n return EntryPoint(\n "local-provisioner", "jupyter_client.provisioning", "LocalProvisioner"\n )\n\n raise\n | .venv\Lib\site-packages\jupyter_client\provisioning\factory.py | factory.py | Python | 9,651 | 0.95 | 0.165 | 0.064327 | python-kit | 785 | 2024-05-12T22:21:47.775868 | GPL-3.0 | false | 3b6c958da035a4633aa1cb96359b7687 |
"""Kernel Provisioner Classes"""\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nimport asyncio\nimport os\nimport signal\nimport sys\nfrom typing import TYPE_CHECKING, Any, Dict, List, Optional\n\nfrom ..connect import KernelConnectionInfo, LocalPortCache\nfrom ..launcher import launch_kernel\nfrom ..localinterfaces import is_local_ip, local_ips\nfrom .provisioner_base import KernelProvisionerBase\n\n\nclass LocalProvisioner(KernelProvisionerBase): # type:ignore[misc]\n """\n :class:`LocalProvisioner` is a concrete class of ABC :py:class:`KernelProvisionerBase`\n and is the out-of-box default implementation used when no kernel provisioner is\n specified in the kernel specification (``kernel.json``). It provides functional\n parity to existing applications by launching the kernel locally and using\n :class:`subprocess.Popen` to manage its lifecycle.\n\n This class is intended to be subclassed for customizing local kernel environments\n and serve as a reference implementation for other custom provisioners.\n """\n\n process = None\n _exit_future = None\n pid = None\n pgid = None\n ip = None\n ports_cached = False\n\n @property\n def has_process(self) -> bool:\n return self.process is not None\n\n async def poll(self) -> Optional[int]:\n """Poll the provisioner."""\n ret = 0\n if self.process:\n ret = self.process.poll() # type:ignore[unreachable]\n return ret\n\n async def wait(self) -> Optional[int]:\n """Wait for the provisioner process."""\n ret = 0\n if self.process:\n # Use busy loop at 100ms intervals, polling until the process is\n # not alive. If we find the process is no longer alive, complete\n # its cleanup via the blocking wait(). 
Callers are responsible for\n # issuing calls to wait() using a timeout (see kill()).\n while await self.poll() is None: # type:ignore[unreachable]\n await asyncio.sleep(0.1)\n\n # Process is no longer alive, wait and clear\n ret = self.process.wait()\n # Make sure all the fds get closed.\n for attr in ["stdout", "stderr", "stdin"]:\n fid = getattr(self.process, attr)\n if fid:\n fid.close()\n self.process = None # allow has_process to now return False\n return ret\n\n async def send_signal(self, signum: int) -> None:\n """Sends a signal to the process group of the kernel (this\n usually includes the kernel and any subprocesses spawned by\n the kernel).\n\n Note that since only SIGTERM is supported on Windows, we will\n check if the desired signal is for interrupt and apply the\n applicable code on Windows in that case.\n """\n if self.process:\n if signum == signal.SIGINT and sys.platform == "win32": # type:ignore[unreachable]\n from ..win_interrupt import send_interrupt\n\n send_interrupt(self.process.win32_interrupt_event)\n return\n\n # Prefer process-group over process\n if self.pgid and hasattr(os, "killpg"):\n try:\n os.killpg(self.pgid, signum)\n return\n except OSError:\n pass # We'll retry sending the signal to only the process below\n\n # If we're here, send the signal to the process and let caller handle exceptions\n self.process.send_signal(signum)\n return\n\n async def kill(self, restart: bool = False) -> None:\n """Kill the provisioner and optionally restart."""\n if self.process:\n if hasattr(signal, "SIGKILL"): # type:ignore[unreachable]\n # If available, give preference to signalling the process-group over `kill()`.\n try:\n await self.send_signal(signal.SIGKILL)\n return\n except OSError:\n pass\n try:\n self.process.kill()\n except OSError as e:\n LocalProvisioner._tolerate_no_process(e)\n\n async def terminate(self, restart: bool = False) -> None:\n """Terminate the provisioner and optionally restart."""\n if self.process:\n if hasattr(signal, 
"SIGTERM"): # type:ignore[unreachable]\n # If available, give preference to signalling the process group over `terminate()`.\n try:\n await self.send_signal(signal.SIGTERM)\n return\n except OSError:\n pass\n try:\n self.process.terminate()\n except OSError as e:\n LocalProvisioner._tolerate_no_process(e)\n\n @staticmethod\n def _tolerate_no_process(os_error: OSError) -> None:\n # In Windows, we will get an Access Denied error if the process\n # has already terminated. Ignore it.\n if sys.platform == "win32":\n if os_error.winerror != 5:\n raise\n # On Unix, we may get an ESRCH error (or ProcessLookupError instance) if\n # the process has already terminated. Ignore it.\n else:\n from errno import ESRCH\n\n if not isinstance(os_error, ProcessLookupError) or os_error.errno != ESRCH:\n raise\n\n async def cleanup(self, restart: bool = False) -> None:\n """Clean up the resources used by the provisioner and optionally restart."""\n if self.ports_cached and not restart:\n # provisioner is about to be destroyed, return cached ports\n lpc = LocalPortCache.instance()\n ports = (\n self.connection_info["shell_port"],\n self.connection_info["iopub_port"],\n self.connection_info["stdin_port"],\n self.connection_info["hb_port"],\n self.connection_info["control_port"],\n )\n for port in ports:\n if TYPE_CHECKING:\n assert isinstance(port, int)\n lpc.return_port(port)\n\n async def pre_launch(self, **kwargs: Any) -> Dict[str, Any]:\n """Perform any steps in preparation for kernel process launch.\n\n This includes applying additional substitutions to the kernel launch command and env.\n It also includes preparation of launch parameters.\n\n Returns the updated kwargs.\n """\n\n # This should be considered temporary until a better division of labor can be defined.\n km = self.parent\n if km:\n if km.transport == "tcp" and not is_local_ip(km.ip):\n msg = (\n "Can only launch a kernel on a local interface. 
"\n f"This one is not: {km.ip}."\n "Make sure that the '*_address' attributes are "\n "configured properly. "\n f"Currently valid addresses are: {local_ips()}"\n )\n raise RuntimeError(msg)\n # build the Popen cmd\n extra_arguments = kwargs.pop("extra_arguments", [])\n\n # write connection file / get default ports\n # TODO - change when handshake pattern is adopted\n if km.cache_ports and not self.ports_cached:\n lpc = LocalPortCache.instance()\n km.shell_port = lpc.find_available_port(km.ip)\n km.iopub_port = lpc.find_available_port(km.ip)\n km.stdin_port = lpc.find_available_port(km.ip)\n km.hb_port = lpc.find_available_port(km.ip)\n km.control_port = lpc.find_available_port(km.ip)\n self.ports_cached = True\n if "env" in kwargs:\n jupyter_session = kwargs["env"].get("JPY_SESSION_NAME", "")\n km.write_connection_file(jupyter_session=jupyter_session)\n else:\n km.write_connection_file()\n self.connection_info = km.get_connection_info()\n\n kernel_cmd = km.format_kernel_cmd(\n extra_arguments=extra_arguments\n ) # This needs to remain here for b/c\n else:\n extra_arguments = kwargs.pop("extra_arguments", [])\n kernel_cmd = self.kernel_spec.argv + extra_arguments\n\n return await super().pre_launch(cmd=kernel_cmd, **kwargs)\n\n async def launch_kernel(self, cmd: List[str], **kwargs: Any) -> KernelConnectionInfo:\n """Launch a kernel with a command."""\n scrubbed_kwargs = LocalProvisioner._scrub_kwargs(kwargs)\n self.process = launch_kernel(cmd, **scrubbed_kwargs)\n pgid = None\n if hasattr(os, "getpgid"):\n try:\n pgid = os.getpgid(self.process.pid)\n except OSError:\n pass\n\n self.pid = self.process.pid\n self.pgid = pgid\n return self.connection_info\n\n @staticmethod\n def _scrub_kwargs(kwargs: Dict[str, Any]) -> Dict[str, Any]:\n """Remove any keyword arguments that Popen does not tolerate."""\n keywords_to_scrub: List[str] = ["extra_arguments", "kernel_id"]\n scrubbed_kwargs = kwargs.copy()\n for kw in keywords_to_scrub:\n scrubbed_kwargs.pop(kw, None)\n 
return scrubbed_kwargs\n\n async def get_provisioner_info(self) -> Dict:\n """Captures the base information necessary for persistence relative to this instance."""\n provisioner_info = await super().get_provisioner_info()\n provisioner_info.update({"pid": self.pid, "pgid": self.pgid, "ip": self.ip})\n return provisioner_info\n\n async def load_provisioner_info(self, provisioner_info: Dict) -> None:\n """Loads the base information necessary for persistence relative to this instance."""\n await super().load_provisioner_info(provisioner_info)\n self.pid = provisioner_info["pid"]\n self.pgid = provisioner_info["pgid"]\n self.ip = provisioner_info["ip"]\n | .venv\Lib\site-packages\jupyter_client\provisioning\local_provisioner.py | local_provisioner.py | Python | 10,055 | 0.95 | 0.252066 | 0.099526 | react-lib | 650 | 2024-06-01T13:51:13.541209 | GPL-3.0 | false | 4c63179818e68b39a759665ee073a8cc |
"""Kernel Provisioner Classes"""\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nimport os\nfrom abc import ABC, ABCMeta, abstractmethod\nfrom typing import Any, Dict, List, Optional, Union\n\nfrom traitlets.config import Instance, LoggingConfigurable, Unicode\n\nfrom ..connect import KernelConnectionInfo\n\n\nclass KernelProvisionerMeta(ABCMeta, type(LoggingConfigurable)): # type: ignore[misc]\n pass\n\n\nclass KernelProvisionerBase( # type:ignore[misc]\n ABC, LoggingConfigurable, metaclass=KernelProvisionerMeta\n):\n """\n Abstract base class defining methods for KernelProvisioner classes.\n\n A majority of methods are abstract (requiring implementations via a subclass) while\n some are optional and others provide implementations common to all instances.\n Subclasses should be aware of which methods require a call to the superclass.\n\n Many of these methods model those of :class:`subprocess.Popen` for parity with\n previous versions where the kernel process was managed directly.\n """\n\n # The kernel specification associated with this provisioner\n kernel_spec: Any = Instance("jupyter_client.kernelspec.KernelSpec", allow_none=True)\n kernel_id: Union[str, Unicode] = Unicode(None, allow_none=True)\n connection_info: KernelConnectionInfo = {}\n\n @property\n @abstractmethod\n def has_process(self) -> bool:\n """\n Returns true if this provisioner is currently managing a process.\n\n This property is asserted to be True immediately following a call to\n the provisioner's :meth:`launch_kernel` method.\n """\n pass\n\n @abstractmethod\n async def poll(self) -> Optional[int]:\n """\n Checks if kernel process is still running.\n\n If running, None is returned, otherwise the process's integer-valued exit code is returned.\n This method is called from :meth:`KernelManager.is_alive`.\n """\n pass\n\n @abstractmethod\n async def wait(self) -> Optional[int]:\n """\n Waits for kernel process to terminate.\n\n This 
method is called from `KernelManager.finish_shutdown()` and\n `KernelManager.kill_kernel()` when terminating a kernel gracefully or\n immediately, respectively.\n """\n pass\n\n @abstractmethod\n async def send_signal(self, signum: int) -> None:\n """\n Sends signal identified by signum to the kernel process.\n\n This method is called from `KernelManager.signal_kernel()` to send the\n kernel process a signal.\n """\n pass\n\n @abstractmethod\n async def kill(self, restart: bool = False) -> None:\n """\n Kill the kernel process.\n\n This is typically accomplished via a SIGKILL signal, which cannot be caught.\n This method is called from `KernelManager.kill_kernel()` when terminating\n a kernel immediately.\n\n restart is True if this operation will precede a subsequent launch_kernel request.\n """\n pass\n\n @abstractmethod\n async def terminate(self, restart: bool = False) -> None:\n """\n Terminates the kernel process.\n\n This is typically accomplished via a SIGTERM signal, which can be caught, allowing\n the kernel provisioner to perform possible cleanup of resources. 
This method is\n called indirectly from `KernelManager.finish_shutdown()` during a kernel's\n graceful termination.\n\n restart is True if this operation precedes a start launch_kernel request.\n """\n pass\n\n @abstractmethod\n async def launch_kernel(self, cmd: List[str], **kwargs: Any) -> KernelConnectionInfo:\n """\n Launch the kernel process and return its connection information.\n\n This method is called from `KernelManager.launch_kernel()` during the\n kernel manager's start kernel sequence.\n """\n pass\n\n @abstractmethod\n async def cleanup(self, restart: bool = False) -> None:\n """\n Cleanup any resources allocated on behalf of the kernel provisioner.\n\n This method is called from `KernelManager.cleanup_resources()` as part of\n its shutdown kernel sequence.\n\n restart is True if this operation precedes a start launch_kernel request.\n """\n pass\n\n async def shutdown_requested(self, restart: bool = False) -> None:\n """\n Allows the provisioner to determine if the kernel's shutdown has been requested.\n\n This method is called from `KernelManager.request_shutdown()` as part of\n its shutdown sequence.\n\n This method is optional and is primarily used in scenarios where the provisioner\n may need to perform other operations in preparation for a kernel's shutdown.\n """\n pass\n\n async def pre_launch(self, **kwargs: Any) -> Dict[str, Any]:\n """\n Perform any steps in preparation for kernel process launch.\n\n This includes applying additional substitutions to the kernel launch command\n and environment. 
It also includes preparation of launch parameters.\n\n NOTE: Subclass implementations are advised to call this method as it applies\n environment variable substitutions from the local environment and calls the\n provisioner's :meth:`_finalize_env()` method to allow each provisioner the\n ability to cleanup the environment variables that will be used by the kernel.\n\n This method is called from `KernelManager.pre_start_kernel()` as part of its\n start kernel sequence.\n\n Returns the (potentially updated) keyword arguments that are passed to\n :meth:`launch_kernel()`.\n """\n env = kwargs.pop("env", os.environ).copy()\n env.update(self.__apply_env_substitutions(env))\n self._finalize_env(env)\n kwargs["env"] = env\n\n return kwargs\n\n async def post_launch(self, **kwargs: Any) -> None:\n """\n Perform any steps following the kernel process launch.\n\n This method is called from `KernelManager.post_start_kernel()` as part of its\n start kernel sequence.\n """\n pass\n\n async def get_provisioner_info(self) -> Dict[str, Any]:\n """\n Captures the base information necessary for persistence relative to this instance.\n\n This enables applications that subclass `KernelManager` to persist a kernel provisioner's\n relevant information to accomplish functionality like disaster recovery or high availability\n by calling this method via the kernel manager's `provisioner` attribute.\n\n NOTE: The superclass method must always be called first to ensure proper serialization.\n """\n provisioner_info: Dict[str, Any] = {}\n provisioner_info["kernel_id"] = self.kernel_id\n provisioner_info["connection_info"] = self.connection_info\n return provisioner_info\n\n async def load_provisioner_info(self, provisioner_info: Dict) -> None:\n """\n Loads the base information necessary for persistence relative to this instance.\n\n The inverse of `get_provisioner_info()`, this enables applications that subclass\n `KernelManager` to re-establish communication with a provisioner that is 
managing\n a (presumably) remote kernel from an entirely different process that the original\n provisioner.\n\n NOTE: The superclass method must always be called first to ensure proper deserialization.\n """\n self.kernel_id = provisioner_info["kernel_id"]\n self.connection_info = provisioner_info["connection_info"]\n\n def get_shutdown_wait_time(self, recommended: float = 5.0) -> float:\n """\n Returns the time allowed for a complete shutdown. This may vary by provisioner.\n\n This method is called from `KernelManager.finish_shutdown()` during the graceful\n phase of its kernel shutdown sequence.\n\n The recommended value will typically be what is configured in the kernel manager.\n """\n return recommended\n\n def get_stable_start_time(self, recommended: float = 10.0) -> float:\n """\n Returns the expected upper bound for a kernel (re-)start to complete.\n This may vary by provisioner.\n\n The recommended value will typically be what is configured in the kernel restarter.\n """\n return recommended\n\n def _finalize_env(self, env: Dict[str, str]) -> None:\n """\n Ensures env is appropriate prior to launch.\n\n This method is called from `KernelProvisionerBase.pre_launch()` during the kernel's\n start sequence.\n\n NOTE: Subclasses should be sure to call super()._finalize_env(env)\n """\n if self.kernel_spec.language and self.kernel_spec.language.lower().startswith("python"):\n # Don't allow PYTHONEXECUTABLE to be passed to kernel process.\n # If set, it can bork all the things.\n env.pop("PYTHONEXECUTABLE", None)\n\n def __apply_env_substitutions(self, substitution_values: Dict[str, str]) -> Dict[str, str]:\n """\n Walks entries in the kernelspec's env stanza and applies substitutions from current env.\n\n This method is called from `KernelProvisionerBase.pre_launch()` during the kernel's\n start sequence.\n\n Returns the substituted list of env entries.\n\n NOTE: This method is private and is not intended to be overridden by provisioners.\n """\n substituted_env 
= {}\n if self.kernel_spec:\n from string import Template\n\n # For each templated env entry, fill any templated references\n # matching names of env variables with those values and build\n # new dict with substitutions.\n templated_env = self.kernel_spec.env\n for k, v in templated_env.items():\n substituted_env.update({k: Template(v).safe_substitute(substitution_values)})\n return substituted_env\n | .venv\Lib\site-packages\jupyter_client\provisioning\provisioner_base.py | provisioner_base.py | Python | 9,970 | 0.95 | 0.155642 | 0.040201 | react-lib | 19 | 2023-09-16T23:00:08.658819 | BSD-3-Clause | false | 4c6c5d57c3692b535dcdc8ddc5bf29b8 |
from .factory import KernelProvisionerFactory # noqa\nfrom .local_provisioner import LocalProvisioner # noqa\nfrom .provisioner_base import KernelProvisionerBase # noqa\n | .venv\Lib\site-packages\jupyter_client\provisioning\__init__.py | __init__.py | Python | 170 | 0.95 | 0 | 0 | vue-tools | 524 | 2024-03-29T07:30:08.472773 | GPL-3.0 | false | 06d4ca37c79f5ff194cafbf94276c7a7 |
\n\n | .venv\Lib\site-packages\jupyter_client\provisioning\__pycache__\factory.cpython-313.pyc | factory.cpython-313.pyc | Other | 10,128 | 0.95 | 0.076923 | 0.009434 | awesome-app | 665 | 2024-01-09T14:59:07.564563 | Apache-2.0 | false | d9aa5f73ed87a23c97f2edecc2a1b514 |
\n\n | .venv\Lib\site-packages\jupyter_client\provisioning\__pycache__\local_provisioner.cpython-313.pyc | local_provisioner.cpython-313.pyc | Other | 12,275 | 0.95 | 0.122642 | 0.019802 | react-lib | 860 | 2023-09-05T17:16:06.331793 | GPL-3.0 | false | 5368291112faf23c0a5ec1e400099f60 |
\n\n | .venv\Lib\site-packages\jupyter_client\provisioning\__pycache__\provisioner_base.cpython-313.pyc | provisioner_base.cpython-313.pyc | Other | 11,765 | 0.95 | 0.073171 | 0.004739 | awesome-app | 201 | 2023-10-26T07:14:09.913683 | BSD-3-Clause | false | d88d2c9e11873ef2eb8c069353903492 |
\n\n | .venv\Lib\site-packages\jupyter_client\provisioning\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 383 | 0.7 | 0 | 0 | node-utils | 844 | 2024-03-14T21:53:10.282602 | BSD-3-Clause | false | eafde640eebc2e1656900a61c8a3abf0 |
"""Sample script showing how to do local port forwarding over paramiko.\n\nThis script connects to the requested SSH server and sets up local port\nforwarding (the openssh -L option) from a local port through a tunneled\nconnection to a destination reachable from the SSH server machine.\n"""\n#\n# This file is adapted from a paramiko demo, and thus licensed under LGPL 2.1.\n# Original Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>\n# Edits Copyright (C) 2010 The IPython Team\n#\n# Paramiko is free software; you can redistribute it and/or modify it under the\n# terms of the GNU Lesser General Public License as published by the Free\n# Software Foundation; either version 2.1 of the License, or (at your option)\n# any later version.\n#\n# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY\n# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR\n# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more\n# details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with Paramiko; if not, write to the Free Software Foundation, Inc.,\n# 51 Franklin Street, Fifth Floor, Boston, MA 02111-1301 USA.\nimport logging\nimport select\nimport socketserver\nimport typing as t\n\nlogger = logging.getLogger("ssh")\n\n\nclass ForwardServer(socketserver.ThreadingTCPServer):\n """A server to use for ssh forwarding."""\n\n daemon_threads = True\n allow_reuse_address = True\n\n\nclass Handler(socketserver.BaseRequestHandler):\n """A handle for server requests."""\n\n @t.no_type_check\n def handle(self):\n """Handle a request."""\n try:\n chan = self.ssh_transport.open_channel(\n "direct-tcpip",\n (self.chain_host, self.chain_port),\n self.request.getpeername(),\n )\n except Exception as e:\n logger.debug(\n "Incoming request to %s:%d failed: %s" % (self.chain_host, self.chain_port, repr(e))\n )\n return\n if chan is None:\n logger.debug(\n "Incoming request to %s:%d was 
rejected by the SSH server."\n % (self.chain_host, self.chain_port)\n )\n return\n\n logger.debug(\n "Connected! Tunnel open {!r} -> {!r} -> {!r}".format(\n self.request.getpeername(),\n chan.getpeername(),\n (self.chain_host, self.chain_port),\n )\n )\n while True:\n r, w, x = select.select([self.request, chan], [], [])\n if self.request in r:\n data = self.request.recv(1024)\n if len(data) == 0:\n break\n chan.send(data)\n if chan in r:\n data = chan.recv(1024)\n if len(data) == 0:\n break\n self.request.send(data)\n chan.close()\n self.request.close()\n logger.debug("Tunnel closed ")\n\n\ndef forward_tunnel(local_port: int, remote_host: str, remote_port: int, transport: t.Any) -> None:\n """Forward an ssh tunnel."""\n\n # this is a little convoluted, but lets me configure things for the Handler\n # object. (SocketServer doesn't give Handlers any way to access the outer\n # server normally.)\n class SubHander(Handler):\n chain_host = remote_host\n chain_port = remote_port\n ssh_transport = transport\n\n ForwardServer(("127.0.0.1", local_port), SubHander).serve_forever()\n\n\n__all__ = ["forward_tunnel"]\n | .venv\Lib\site-packages\jupyter_client\ssh\forward.py | forward.py | Python | 3,560 | 0.95 | 0.166667 | 0.241379 | react-lib | 604 | 2024-12-05T16:11:52.860222 | MIT | false | 7f1eb58807a12176ea4f5408776410cd |
"""Basic ssh tunnel utilities, and convenience functions for tunneling\nzeromq connections.\n"""\n# Copyright (C) 2010-2011 IPython Development Team\n# Copyright (C) 2011- PyZMQ Developers\n#\n# Redistributed from IPython under the terms of the BSD License.\nfrom __future__ import annotations\n\nimport atexit\nimport os\nimport re\nimport signal\nimport socket\nimport sys\nimport warnings\nfrom getpass import getpass, getuser\nfrom multiprocessing import Process\nfrom typing import Any, cast\n\ntry:\n with warnings.catch_warnings():\n warnings.simplefilter("ignore", DeprecationWarning)\n import paramiko\n\n SSHException = paramiko.ssh_exception.SSHException\nexcept ImportError:\n paramiko = None # type:ignore[assignment]\n\n class SSHException(Exception): # type:ignore[no-redef] # noqa\n pass\n\nelse:\n from .forward import forward_tunnel\n\ntry:\n import pexpect # type: ignore[import-untyped]\nexcept ImportError:\n pexpect = None\n\n\ndef select_random_ports(n: int) -> list[int]:\n """Select and return n random ports that are available."""\n ports = []\n sockets = []\n for _ in range(n):\n sock = socket.socket()\n sock.bind(("", 0))\n ports.append(sock.getsockname()[1])\n sockets.append(sock)\n for sock in sockets:\n sock.close()\n return ports\n\n\n# -----------------------------------------------------------------------------\n# Check for passwordless login\n# -----------------------------------------------------------------------------\n_password_pat = re.compile((rb"pass(word|phrase):"), re.IGNORECASE)\n\n\ndef try_passwordless_ssh(server: str, keyfile: str | None, paramiko: Any = None) -> Any:\n """Attempt to make an ssh connection without a password.\n This is mainly used for requiring password input only once\n when many tunnels may be connected to the same server.\n\n If paramiko is None, the default for the platform is chosen.\n """\n if paramiko is None:\n paramiko = sys.platform == "win32"\n f = _try_passwordless_paramiko if paramiko else 
_try_passwordless_openssh\n return f(server, keyfile)\n\n\ndef _try_passwordless_openssh(server: str, keyfile: str | None) -> bool:\n """Try passwordless login with shell ssh command."""\n if pexpect is None:\n msg = "pexpect unavailable, use paramiko"\n raise ImportError(msg)\n cmd = "ssh -f " + server\n if keyfile:\n cmd += " -i " + keyfile\n cmd += " exit"\n\n # pop SSH_ASKPASS from env\n env = os.environ.copy()\n env.pop("SSH_ASKPASS", None)\n\n ssh_newkey = "Are you sure you want to continue connecting"\n p = pexpect.spawn(cmd, env=env)\n while True:\n try:\n i = p.expect([ssh_newkey, _password_pat], timeout=0.1)\n if i == 0:\n msg = "The authenticity of the host can't be established."\n raise SSHException(msg)\n except pexpect.TIMEOUT:\n continue\n except pexpect.EOF:\n return True\n else:\n return False\n\n\ndef _try_passwordless_paramiko(server: str, keyfile: str | None) -> bool:\n """Try passwordless login with paramiko."""\n if paramiko is None:\n msg = "Paramiko unavailable, " # type:ignore[unreachable]\n if sys.platform == "win32":\n msg += "Paramiko is required for ssh tunneled connections on Windows."\n else:\n msg += "use OpenSSH."\n raise ImportError(msg)\n username, server, port = _split_server(server)\n client = paramiko.SSHClient()\n client.load_system_host_keys()\n client.set_missing_host_key_policy(paramiko.WarningPolicy())\n try:\n client.connect(server, port, username=username, key_filename=keyfile, look_for_keys=True)\n except paramiko.AuthenticationException:\n return False\n else:\n client.close()\n return True\n\n\ndef tunnel_connection(\n socket: socket.socket,\n addr: str,\n server: str,\n keyfile: str | None = None,\n password: str | None = None,\n paramiko: Any = None,\n timeout: int = 60,\n) -> int:\n """Connect a socket to an address via an ssh tunnel.\n\n This is a wrapper for socket.connect(addr), when addr is not accessible\n from the local machine. 
It simply creates an ssh tunnel using the remaining args,\n and calls socket.connect('tcp://localhost:lport') where lport is the randomly\n selected local port of the tunnel.\n\n """\n new_url, tunnel = open_tunnel(\n addr,\n server,\n keyfile=keyfile,\n password=password,\n paramiko=paramiko,\n timeout=timeout,\n )\n socket.connect(new_url)\n return tunnel\n\n\ndef open_tunnel(\n addr: str,\n server: str,\n keyfile: str | None = None,\n password: str | None = None,\n paramiko: Any = None,\n timeout: int = 60,\n) -> tuple[str, int]:\n """Open a tunneled connection from a 0MQ url.\n\n For use inside tunnel_connection.\n\n Returns\n -------\n\n (url, tunnel) : (str, object)\n The 0MQ url that has been forwarded, and the tunnel object\n """\n\n lport = select_random_ports(1)[0]\n _, addr = addr.split("://")\n ip, rport = addr.split(":")\n rport_int = int(rport)\n paramiko = sys.platform == "win32" if paramiko is None else paramiko_tunnel\n tunnelf = paramiko_tunnel if paramiko else openssh_tunnel\n\n tunnel = tunnelf(\n lport,\n rport_int,\n server,\n remoteip=ip,\n keyfile=keyfile,\n password=password,\n timeout=timeout,\n )\n return "tcp://127.0.0.1:%i" % lport, cast(int, tunnel)\n\n\ndef openssh_tunnel(\n lport: int,\n rport: int,\n server: str,\n remoteip: str = "127.0.0.1",\n keyfile: str | None = None,\n password: str | None | bool = None,\n timeout: int = 60,\n) -> int:\n """Create an ssh tunnel using command-line ssh that connects port lport\n on this machine to localhost:rport on server. 
The tunnel\n will automatically close when not in use, remaining open\n for a minimum of timeout seconds for an initial connection.\n\n This creates a tunnel redirecting `localhost:lport` to `remoteip:rport`,\n as seen from `server`.\n\n keyfile and password may be specified, but ssh config is checked for defaults.\n\n Parameters\n ----------\n\n lport : int\n local port for connecting to the tunnel from this machine.\n rport : int\n port on the remote machine to connect to.\n server : str\n The ssh server to connect to. The full ssh server string will be parsed.\n user@server:port\n remoteip : str [Default: 127.0.0.1]\n The remote ip, specifying the destination of the tunnel.\n Default is localhost, which means that the tunnel would redirect\n localhost:lport on this machine to localhost:rport on the *server*.\n\n keyfile : str; path to public key file\n This specifies a key to be used in ssh login, default None.\n Regular default ssh keys will be used without specifying this argument.\n password : str;\n Your ssh password to the ssh server. Note that if this is left None,\n you will be prompted for it if passwordless key based login is unavailable.\n timeout : int [default: 60]\n The time (in seconds) after which no activity will result in the tunnel\n closing. 
This prevents orphaned tunnels from running forever.\n """\n if pexpect is None:\n msg = "pexpect unavailable, use paramiko_tunnel"\n raise ImportError(msg)\n ssh = "ssh "\n if keyfile:\n ssh += "-i " + keyfile\n\n if ":" in server:\n server, port = server.split(":")\n ssh += " -p %s" % port\n\n cmd = f"{ssh} -O check {server}"\n (output, exitstatus) = pexpect.run(cmd, withexitstatus=True)\n if not exitstatus:\n pid = int(output[output.find(b"(pid=") + 5 : output.find(b")")])\n cmd = "%s -O forward -L 127.0.0.1:%i:%s:%i %s" % (\n ssh,\n lport,\n remoteip,\n rport,\n server,\n )\n (output, exitstatus) = pexpect.run(cmd, withexitstatus=True)\n if not exitstatus:\n atexit.register(_stop_tunnel, cmd.replace("-O forward", "-O cancel", 1))\n return pid\n cmd = "%s -f -S none -L 127.0.0.1:%i:%s:%i %s sleep %i" % (\n ssh,\n lport,\n remoteip,\n rport,\n server,\n timeout,\n )\n\n # pop SSH_ASKPASS from env\n env = os.environ.copy()\n env.pop("SSH_ASKPASS", None)\n\n ssh_newkey = "Are you sure you want to continue connecting"\n tunnel = pexpect.spawn(cmd, env=env)\n failed = False\n while True:\n try:\n i = tunnel.expect([ssh_newkey, _password_pat], timeout=0.1)\n if i == 0:\n msg = "The authenticity of the host can't be established."\n raise SSHException(msg)\n except pexpect.TIMEOUT:\n continue\n except pexpect.EOF as e:\n tunnel.wait()\n if tunnel.exitstatus:\n raise RuntimeError("tunnel '%s' failed to start" % (cmd)) from e\n else:\n return tunnel.pid\n else:\n if failed:\n warnings.warn("Password rejected, try again", stacklevel=2)\n password = None\n if password is None:\n password = getpass("%s's password: " % (server))\n tunnel.sendline(password)\n failed = True\n\n\ndef _stop_tunnel(cmd: Any) -> None:\n pexpect.run(cmd)\n\n\ndef _split_server(server: str) -> tuple[str, str, int]:\n if "@" in server:\n username, server = server.split("@", 1)\n else:\n username = getuser()\n if ":" in server:\n server, port_str = server.split(":")\n port = int(port_str)\n else:\n 
port = 22\n return username, server, port\n\n\ndef paramiko_tunnel(\n lport: int,\n rport: int,\n server: str,\n remoteip: str = "127.0.0.1",\n keyfile: str | None = None,\n password: str | None = None,\n timeout: float = 60,\n) -> Process:\n """launch a tunner with paramiko in a subprocess. This should only be used\n when shell ssh is unavailable (e.g. Windows).\n\n This creates a tunnel redirecting `localhost:lport` to `remoteip:rport`,\n as seen from `server`.\n\n If you are familiar with ssh tunnels, this creates the tunnel:\n\n ssh server -L localhost:lport:remoteip:rport\n\n keyfile and password may be specified, but ssh config is checked for defaults.\n\n\n Parameters\n ----------\n\n lport : int\n local port for connecting to the tunnel from this machine.\n rport : int\n port on the remote machine to connect to.\n server : str\n The ssh server to connect to. The full ssh server string will be parsed.\n user@server:port\n remoteip : str [Default: 127.0.0.1]\n The remote ip, specifying the destination of the tunnel.\n Default is localhost, which means that the tunnel would redirect\n localhost:lport on this machine to localhost:rport on the *server*.\n\n keyfile : str; path to public key file\n This specifies a key to be used in ssh login, default None.\n Regular default ssh keys will be used without specifying this argument.\n password : str;\n Your ssh password to the ssh server. Note that if this is left None,\n you will be prompted for it if passwordless key based login is unavailable.\n timeout : int [default: 60]\n The time (in seconds) after which no activity will result in the tunnel\n closing. 
This prevents orphaned tunnels from running forever.\n\n """\n if paramiko is None:\n msg = "Paramiko not available" # type:ignore[unreachable]\n raise ImportError(msg)\n\n if password is None and not _try_passwordless_paramiko(server, keyfile):\n password = getpass("%s's password: " % (server))\n\n p = Process(\n target=_paramiko_tunnel,\n args=(lport, rport, server, remoteip),\n kwargs={"keyfile": keyfile, "password": password},\n )\n p.daemon = True\n p.start()\n return p\n\n\ndef _paramiko_tunnel(\n lport: int,\n rport: int,\n server: str,\n remoteip: str,\n keyfile: str | None = None,\n password: str | None = None,\n) -> None:\n """Function for actually starting a paramiko tunnel, to be passed\n to multiprocessing.Process(target=this), and not called directly.\n """\n username, server, port = _split_server(server)\n client = paramiko.SSHClient()\n client.load_system_host_keys()\n client.set_missing_host_key_policy(paramiko.WarningPolicy())\n\n try:\n client.connect(\n server,\n port,\n username=username,\n key_filename=keyfile,\n look_for_keys=True,\n password=password,\n )\n # except paramiko.AuthenticationException:\n # if password is None:\n # password = getpass("%s@%s's password: "%(username, server))\n # client.connect(server, port, username=username, password=password)\n # else:\n # raise\n except Exception as e:\n warnings.warn("*** Failed to connect to %s:%d: %r" % (server, port, e), stacklevel=2)\n sys.exit(1)\n\n # Don't let SIGINT kill the tunnel subprocess\n signal.signal(signal.SIGINT, signal.SIG_IGN)\n\n try:\n forward_tunnel(lport, remoteip, rport, client.get_transport())\n except KeyboardInterrupt:\n warnings.warn("SIGINT: Port forwarding stopped cleanly", stacklevel=2)\n sys.exit(0)\n except Exception as e:\n warnings.warn("Port forwarding stopped uncleanly: %s" % e, stacklevel=2)\n sys.exit(255)\n\n\nif sys.platform == "win32":\n ssh_tunnel = paramiko_tunnel\nelse:\n ssh_tunnel = openssh_tunnel\n\n\n__all__ = [\n "tunnel_connection",\n 
"ssh_tunnel",\n "openssh_tunnel",\n "paramiko_tunnel",\n "try_passwordless_ssh",\n]\n | .venv\Lib\site-packages\jupyter_client\ssh\tunnel.py | tunnel.py | Python | 13,795 | 0.95 | 0.150224 | 0.042216 | node-utils | 908 | 2024-08-06T20:52:28.299524 | Apache-2.0 | false | add253b8d4921e6662c987e0c6278c4c |
# Re-export the ssh tunnelling helpers at the package level; the set of
# public names (tunnel_connection, open_tunnel, ssh_tunnel, ...) is
# controlled by ``tunnel.__all__``.
from .tunnel import *  # noqa
\n\n | .venv\Lib\site-packages\jupyter_client\ssh\__pycache__\forward.cpython-313.pyc | forward.cpython-313.pyc | Other | 4,197 | 0.8 | 0.05 | 0 | react-lib | 418 | 2024-11-30T18:19:16.500160 | BSD-3-Clause | false | adab939380a2443256a3056d50918200 |
\n\n | .venv\Lib\site-packages\jupyter_client\ssh\__pycache__\tunnel.cpython-313.pyc | tunnel.cpython-313.pyc | Other | 16,169 | 0.95 | 0.064189 | 0.023166 | node-utils | 879 | 2024-02-14T08:10:40.771618 | BSD-3-Clause | false | 82c0f5ce8adbd7d4c5b677084752bbbb |
\n\n | .venv\Lib\site-packages\jupyter_client\ssh\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 221 | 0.7 | 0 | 0 | node-utils | 696 | 2024-09-01T07:23:19.557668 | GPL-3.0 | false | 1a4a6b7c1f88acd7cf16fdcda39ace5f |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\adapter.cpython-313.pyc | adapter.cpython-313.pyc | Other | 17,706 | 0.95 | 0.019608 | 0.005208 | node-utils | 603 | 2023-08-11T21:26:42.937054 | BSD-3-Clause | false | 9ce9a2050d4a1d2a199f879e31e04b69 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\channels.cpython-313.pyc | channels.cpython-313.pyc | Other | 15,590 | 0.95 | 0.06962 | 0 | node-utils | 835 | 2025-02-25T01:07:30.156938 | Apache-2.0 | false | 958e5f0d43811be4b1dde67b67db8011 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\channelsabc.cpython-313.pyc | channelsabc.cpython-313.pyc | Other | 2,429 | 0.85 | 0.128205 | 0 | python-kit | 581 | 2023-09-27T10:48:35.446792 | Apache-2.0 | false | c9d5259cb4725c4d703fc2131616aa32 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\client.cpython-313.pyc | client.cpython-313.pyc | Other | 36,900 | 0.95 | 0.101124 | 0.01292 | python-kit | 58 | 2023-09-14T07:42:58.231986 | MIT | false | d0c28dfa9e4d8d89f8ad26bc83a0c35d |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\clientabc.cpython-313.pyc | clientabc.cpython-313.pyc | Other | 3,999 | 0.95 | 0.074074 | 0 | python-kit | 780 | 2024-06-29T04:49:26.806693 | Apache-2.0 | false | cf7b7b16701f796f5cad14ac8c334815 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\connect.cpython-313.pyc | connect.cpython-313.pyc | Other | 29,354 | 0.95 | 0.091954 | 0.003236 | node-utils | 33 | 2024-01-01T03:56:53.630716 | Apache-2.0 | false | 3d921cf28573f2fe36ad84c372c9a877 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\consoleapp.cpython-313.pyc | consoleapp.cpython-313.pyc | Other | 15,827 | 0.95 | 0.070588 | 0.02649 | vue-tools | 808 | 2023-09-18T03:05:40.921208 | Apache-2.0 | false | a63c3770d115735282fed209358f844a |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\jsonutil.cpython-313.pyc | jsonutil.cpython-313.pyc | Other | 7,100 | 0.95 | 0.033708 | 0 | node-utils | 949 | 2024-12-11T18:20:18.138685 | MIT | false | 0616f2cff0c65f8cca03fba09bca68ed |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\kernelapp.cpython-313.pyc | kernelapp.cpython-313.pyc | Other | 5,762 | 0.8 | 0.017857 | 0.02 | python-kit | 134 | 2024-02-09T23:54:10.682528 | Apache-2.0 | false | 017fb81e4a69ec48083c2171410ae5dc |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\kernelspec.cpython-313.pyc | kernelspec.cpython-313.pyc | Other | 19,731 | 0.95 | 0.074074 | 0 | python-kit | 68 | 2025-02-19T05:07:16.097758 | GPL-3.0 | false | 9ff2663576a5074f227f8da955012074 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\kernelspecapp.cpython-313.pyc | kernelspecapp.cpython-313.pyc | Other | 16,108 | 0.95 | 0.064748 | 0 | awesome-app | 375 | 2024-08-27T04:57:42.483294 | Apache-2.0 | false | 35c7f928d57b27eb119453353bea8742 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\launcher.cpython-313.pyc | launcher.cpython-313.pyc | Other | 5,299 | 0.95 | 0.04 | 0.011236 | awesome-app | 160 | 2024-12-12T06:28:49.086739 | GPL-3.0 | false | e45994911609d0cb25fbf061f44db955 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\localinterfaces.cpython-313.pyc | localinterfaces.cpython-313.pyc | Other | 13,316 | 0.95 | 0.06015 | 0 | awesome-app | 788 | 2023-10-30T14:57:26.391877 | BSD-3-Clause | false | 15ff7e7461c96cf96b5a771f7f919e70 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\manager.cpython-313.pyc | manager.cpython-313.pyc | Other | 39,848 | 0.95 | 0.051205 | 0.013201 | node-utils | 855 | 2023-12-16T01:30:02.849503 | Apache-2.0 | false | 402be65f80f79efe9f867e8d40fbc293 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\managerabc.cpython-313.pyc | managerabc.cpython-313.pyc | Other | 2,668 | 0.95 | 0.061538 | 0 | react-lib | 573 | 2024-07-06T08:24:08.553247 | MIT | false | d7df84f4afbcf12b0cf26397a4d37341 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\multikernelmanager.cpython-313.pyc | multikernelmanager.cpython-313.pyc | Other | 28,015 | 0.95 | 0.081433 | 0.011111 | vue-tools | 377 | 2024-05-07T07:08:09.869474 | BSD-3-Clause | false | 55d4b344cc18cac3d84a887b9c485dbb |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\restarter.cpython-313.pyc | restarter.cpython-313.pyc | Other | 6,815 | 0.8 | 0.078125 | 0 | react-lib | 889 | 2025-02-25T08:52:24.374361 | Apache-2.0 | false | 77f3e4083a4697eae22a0eee3fa5c1dc |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\runapp.cpython-313.pyc | runapp.cpython-313.pyc | Other | 6,406 | 0.8 | 0.040541 | 0 | react-lib | 453 | 2023-12-16T01:32:29.582516 | MIT | false | a806ab5c12820193467e92d8764d7643 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\session.cpython-313.pyc | session.cpython-313.pyc | Other | 44,186 | 0.95 | 0.067164 | 0.012579 | node-utils | 312 | 2024-07-03T19:26:04.888173 | GPL-3.0 | false | 440a80f99fa6720c051431ce77ef0935 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\threaded.cpython-313.pyc | threaded.cpython-313.pyc | Other | 16,478 | 0.95 | 0.044444 | 0.036364 | vue-tools | 312 | 2024-07-09T15:43:40.122576 | MIT | false | 5d41442c8cf301f3fd3a5455a225de9f |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\utils.cpython-313.pyc | utils.cpython-313.pyc | Other | 3,669 | 0.95 | 0.095238 | 0 | react-lib | 627 | 2024-04-10T16:28:26.943554 | BSD-3-Clause | false | 9db46a9e785ccebda42ed813b437c912 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\win_interrupt.cpython-313.pyc | win_interrupt.cpython-313.pyc | Other | 2,120 | 0.8 | 0.066667 | 0.037037 | awesome-app | 878 | 2023-10-15T06:31:58.733707 | MIT | false | e6d91956e7070f6887ebeb6dac1e40f5 |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\_version.cpython-313.pyc | _version.cpython-313.pyc | Other | 977 | 0.8 | 0.142857 | 0 | awesome-app | 47 | 2023-07-24T04:00:08.877253 | Apache-2.0 | false | 68191335a69b7babf4a3a0b67489e73a |
\n\n | .venv\Lib\site-packages\jupyter_client\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 877 | 0.7 | 0 | 0 | awesome-app | 453 | 2024-11-15T06:53:24.704558 | Apache-2.0 | false | f3efea6a9f01179f92f19b076df598ee |
[console_scripts]\njupyter-kernel = jupyter_client.kernelapp:main\njupyter-kernelspec = jupyter_client.kernelspecapp:KernelSpecApp.launch_instance\njupyter-run = jupyter_client.runapp:RunApp.launch_instance\n\n[jupyter_client.kernel_provisioners]\nlocal-provisioner = jupyter_client.provisioning:LocalProvisioner\n | .venv\Lib\site-packages\jupyter_client-8.6.3.dist-info\entry_points.txt | entry_points.txt | Other | 307 | 0.7 | 0 | 0 | awesome-app | 29 | 2025-06-17T18:29:16.616654 | MIT | false | 54e3ef3c1a2cacf82ec473b8847bb643 |
pip\n | .venv\Lib\site-packages\jupyter_client-8.6.3.dist-info\INSTALLER | INSTALLER | Other | 4 | 0.5 | 0 | 0 | react-lib | 920 | 2024-02-23T06:42:40.383155 | BSD-3-Clause | false | 365c9bfeb7d89244f2ce01c1de44cb85 |
Metadata-Version: 2.3\nName: jupyter_client\nVersion: 8.6.3\nSummary: Jupyter protocol implementation and client libraries\nProject-URL: Homepage, https://jupyter.org\nProject-URL: Documentation, https://jupyter-client.readthedocs.io/\nProject-URL: Source, https://github.com/jupyter/jupyter_client\nAuthor-email: Jupyter Development Team <jupyter@googlegroups.com>\nLicense: BSD 3-Clause License\n \n - Copyright (c) 2001-2015, IPython Development Team\n - Copyright (c) 2015-, Jupyter Development Team\n \n All rights reserved.\n \n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are met:\n \n 1. Redistributions of source code must retain the above copyright notice, this\n list of conditions and the following disclaimer.\n \n 2. Redistributions in binary form must reproduce the above copyright notice,\n this list of conditions and the following disclaimer in the documentation\n and/or other materials provided with the distribution.\n \n 3. Neither the name of the copyright holder nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n \n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\n AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\nLicense-File: LICENSE\nKeywords: Interactive,Interpreter,Shell,Web\nClassifier: Framework :: Jupyter\nClassifier: Intended Audience :: Developers\nClassifier: Intended Audience :: Education\nClassifier: Intended Audience :: Science/Research\nClassifier: Intended Audience :: System Administrators\nClassifier: License :: OSI Approved :: BSD License\nClassifier: Operating System :: OS Independent\nClassifier: Programming Language :: Python\nClassifier: Programming Language :: Python :: 3\nRequires-Python: >=3.8\nRequires-Dist: importlib-metadata>=4.8.3; python_version < '3.10'\nRequires-Dist: jupyter-core!=5.0.*,>=4.12\nRequires-Dist: python-dateutil>=2.8.2\nRequires-Dist: pyzmq>=23.0\nRequires-Dist: tornado>=6.2\nRequires-Dist: traitlets>=5.3\nProvides-Extra: docs\nRequires-Dist: ipykernel; extra == 'docs'\nRequires-Dist: myst-parser; extra == 'docs'\nRequires-Dist: pydata-sphinx-theme; extra == 'docs'\nRequires-Dist: sphinx-autodoc-typehints; extra == 'docs'\nRequires-Dist: sphinx>=4; extra == 'docs'\nRequires-Dist: sphinxcontrib-github-alt; extra == 'docs'\nRequires-Dist: sphinxcontrib-spelling; extra == 'docs'\nProvides-Extra: test\nRequires-Dist: coverage; extra == 'test'\nRequires-Dist: ipykernel>=6.14; extra == 'test'\nRequires-Dist: mypy; extra == 'test'\nRequires-Dist: paramiko; (sys_platform == 'win32') and extra == 'test'\nRequires-Dist: pre-commit; extra == 'test'\nRequires-Dist: pytest-cov; extra == 'test'\nRequires-Dist: 
pytest-jupyter[client]>=0.4.1; extra == 'test'\nRequires-Dist: pytest-timeout; extra == 'test'\nRequires-Dist: pytest<8.2.0; extra == 'test'\nDescription-Content-Type: text/markdown\n\n# Jupyter Client\n\n[](https://github.com/jupyter/jupyter_client/actions)\n[](http://jupyter-client.readthedocs.io/en/latest/?badge=latest)\n\n`jupyter_client` contains the reference implementation of the [Jupyter protocol].\nIt also provides client and kernel management APIs for working with kernels.\n\nIt also provides the `jupyter kernelspec` entrypoint\nfor installing kernelspecs for use with Jupyter frontends.\n\n## Development Setup\n\nThe [Jupyter Contributor Guides](https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html) provide extensive information on contributing code or documentation to Jupyter projects. The limited instructions below for setting up a development environment are for your convenience.\n\n## Coding\n\nYou'll need Python and `pip` on the search path. Clone the Jupyter Client git repository to your computer, for example in `/my/project/jupyter_client`\n\n```bash\ncd /my/projects/\ngit clone git@github.com:jupyter/jupyter_client.git\n```\n\nNow create an [editable install](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs)\nand download the dependencies of code and test suite by executing:\n\n```bash\ncd /my/projects/jupyter_client/\npip install -e ".[test]"\npytest\n```\n\nThe last command runs the test suite to verify the setup. During development, you can pass filenames to `pytest`, and it will execute only those tests.\n\n## Documentation\n\nThe documentation of Jupyter Client is generated from the files in `docs/` using Sphinx. Instructions for setting up Sphinx with a selection of optional modules are in the [Documentation Guide](https://jupyter.readthedocs.io/en/latest/contributing/docs-contributions/index.html). 
For a minimal Sphinx installation to process the Jupyter Client docs, execute:

```bash
pip install ".[docs]"
```
Thus, the Jupyter\nsource code, in its entirety is not the copyright of any single person or\ninstitution. Instead, it is the collective copyright of the entire Jupyter\nDevelopment Team. If individual contributors want to maintain a record of what\nchanges/contributions they have specific copyright on, they should indicate\ntheir copyright in the commit message of the change, when they commit the\nchange to one of the Jupyter repositories.\n\nWith this in mind, the following banner should be used in any source code file\nto indicate the copyright and license terms:\n\n```\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n```\n\n[jupyter protocol]: https://jupyter-client.readthedocs.io/en/latest/messaging.html\n | .venv\Lib\site-packages\jupyter_client-8.6.3.dist-info\METADATA | METADATA | Other | 8,311 | 0.95 | 0.043011 | 0.061644 | react-lib | 445 | 2024-03-27T09:50:57.112458 | Apache-2.0 | false | 3539f990bda44518ec5e73537347eefd |
../../Scripts/jupyter-kernel.exe,sha256=gqA2ZXJ7Rcc8W67-S3MYPECLFes50HO-jvyNdGY-GSU,108425\n../../Scripts/jupyter-kernelspec.exe,sha256=qZZlEQ6bM1m_0V3sruS96HqMCnAmgLoMTbtp4MQLing,108463\n../../Scripts/jupyter-run.exe,sha256=j0huoYxn6wSVmZuhbpoWMZVZYjasQV8R07Dovj8vHOI,108442\njupyter_client-8.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\njupyter_client-8.6.3.dist-info/METADATA,sha256=A8nUw-nMO4ykef5ypCSTEX5HyKDH9a7WCDA6Vp4Zzbs,8311\njupyter_client-8.6.3.dist-info/RECORD,,\njupyter_client-8.6.3.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87\njupyter_client-8.6.3.dist-info/entry_points.txt,sha256=j1yiV6Ixc2dWAb_O2sV1p001Z3C-CV9cZSfda3Rvoqo,307\njupyter_client-8.6.3.dist-info/licenses/LICENSE,sha256=XKdOTS7rkzCw0SnCX4dNNUShNBO8Yq6NNngZEA0JUHI,1588\njupyter_client/__init__.py,sha256=D2P98t2OtHEnRnE8M9MTBaQ9qzI_ZJqLzfxAWZy_XVI,539\njupyter_client/__pycache__/__init__.cpython-313.pyc,,\njupyter_client/__pycache__/_version.cpython-313.pyc,,\njupyter_client/__pycache__/adapter.cpython-313.pyc,,\njupyter_client/__pycache__/channels.cpython-313.pyc,,\njupyter_client/__pycache__/channelsabc.cpython-313.pyc,,\njupyter_client/__pycache__/client.cpython-313.pyc,,\njupyter_client/__pycache__/clientabc.cpython-313.pyc,,\njupyter_client/__pycache__/connect.cpython-313.pyc,,\njupyter_client/__pycache__/consoleapp.cpython-313.pyc,,\njupyter_client/__pycache__/jsonutil.cpython-313.pyc,,\njupyter_client/__pycache__/kernelapp.cpython-313.pyc,,\njupyter_client/__pycache__/kernelspec.cpython-313.pyc,,\njupyter_client/__pycache__/kernelspecapp.cpython-313.pyc,,\njupyter_client/__pycache__/launcher.cpython-313.pyc,,\njupyter_client/__pycache__/localinterfaces.cpython-313.pyc,,\njupyter_client/__pycache__/manager.cpython-313.pyc,,\njupyter_client/__pycache__/managerabc.cpython-313.pyc,,\njupyter_client/__pycache__/multikernelmanager.cpython-313.pyc,,\njupyter_client/__pycache__/restarter.cpython-313.pyc,,\njupyter_client/__pycache__/runap
p.cpython-313.pyc,,\njupyter_client/__pycache__/session.cpython-313.pyc,,\njupyter_client/__pycache__/threaded.cpython-313.pyc,,\njupyter_client/__pycache__/utils.cpython-313.pyc,,\njupyter_client/__pycache__/win_interrupt.cpython-313.pyc,,\njupyter_client/_version.py,sha256=90PEVfmVy00MFmR0IVvxzaBBsQvBc3rVV5II3ODEhP8,577\njupyter_client/adapter.py,sha256=FVUiBE61CiWiS8tmC3sQ2v0N4IIYz8TP7Pt2AAhKGqw,14381\njupyter_client/asynchronous/__init__.py,sha256=HQm-emcZbqMSBPACOEHtLHWp9MTH4Mo_Tk6SsFdAaos,46\njupyter_client/asynchronous/__pycache__/__init__.cpython-313.pyc,,\njupyter_client/asynchronous/__pycache__/client.cpython-313.pyc,,\njupyter_client/asynchronous/client.py,sha256=dXSCsp5hYRSrYSifcq7c6VyKtUYwn0FLd5a4FXgIr2I,2870\njupyter_client/blocking/__init__.py,sha256=M46ubrTP92sKF5IgZt8_6BhMt9AjjreEY_81N4gtRiQ,49\njupyter_client/blocking/__pycache__/__init__.cpython-313.pyc,,\njupyter_client/blocking/__pycache__/client.cpython-313.pyc,,\njupyter_client/blocking/client.py,sha256=mKI5yz3-u1t1iAysnmSrSH5a8HFubw3T3Xv3g1cwg2c,2742\njupyter_client/channels.py,sha256=0MrErnQ7IbUYJZcwZ7CjQzhrZCHrNxAWBOzTHcqXS3M,10835\njupyter_client/channelsabc.py,sha256=Nsv5b8KXvWeDnVaCRUlpphclGPbmEcxslLwuvURA_KM,1177\njupyter_client/client.py,sha256=T5fgCQDsZ0WqjxlJo0zE15vf1q-hUjhVDmmbyg05ZCU,30848\njupyter_client/clientabc.py,sha256=C50RXZ5sCOem1XtfObNdBgOvEzMf2It5aFtBN1nwz6s,2776\njupyter_client/connect.py,sha256=oCwcGkNVwU18qYYuu1oxehITqWjGdwwARNqDp1US6S8,25340\njupyter_client/consoleapp.py,sha256=yVo-NWJBvaW-21UI6NXhHPpCzF18kHkGiKI2ctvmcPE,13913\njupyter_client/ioloop/__init__.py,sha256=sw5d6i2whm-2i76wmY2PC90U1Z4qskTKieZ6rWb59Zo,214\njupyter_client/ioloop/__pycache__/__init__.cpython-313.pyc,,\njupyter_client/ioloop/__pycache__/manager.cpython-313.pyc,,\njupyter_client/ioloop/__pycache__/restarter.cpython-313.pyc,,\njupyter_client/ioloop/manager.py,sha256=v-vV62J3sqXQlsKFz2xcJSxOVAkyZrE84r3cNeYS55U,4162\njupyter_client/ioloop/restarter.py,sha256=8BAlyCXjkDCA2QxBho8hB942ONLYfqk0BPN3iZ89
v_c,3906\njupyter_client/jsonutil.py,sha256=P-aIJwRmWyClhxRG7_CvYCKrIZlWF0osaXVTquh0FxE,6039\njupyter_client/kernelapp.py,sha256=0iFr2PBwWS18p6iQ-TIS8bmKtDH7hfycA-dja1t0y0g,2941\njupyter_client/kernelspec.py,sha256=BzSmn5dcnib51U5zwtHV7Mb_R1QxRKjtcB1GJczFY0k,15663\njupyter_client/kernelspecapp.py,sha256=LN75jgzJp9HlHBsqvm3M9jgpzIkkanL5bWHtniznYXs,12048\njupyter_client/launcher.py,sha256=Pdm4b4n7gDgXouS3SoOI5d0b1sQGFtr5f9nOPtlJ5G8,6443\njupyter_client/localinterfaces.py,sha256=FcdhUAa8hAjP7ootmEmlsQ1KEKcPsBiO7OTEG9M6zMw,9733\njupyter_client/manager.py,sha256=HnH8ex2txIJ3C-1NSY9ZIcJmdUBv_wVVJtWlSqr7xSY,30322\njupyter_client/managerabc.py,sha256=Y91q7iJYYGJoLQrbu9476AvzQHyUSHIwB-0zVWGAD3s,1490\njupyter_client/multikernelmanager.py,sha256=X6fZAlowRPaQechh0wxlI0pX7EscHSihKr0zDGcAwEw,22601\njupyter_client/provisioning/__init__.py,sha256=K2Jt-TExYmhUkjq00HAdE5KhNyapGdMFJ_qLLckpnEY,170\njupyter_client/provisioning/__pycache__/__init__.cpython-313.pyc,,\njupyter_client/provisioning/__pycache__/factory.cpython-313.pyc,,\njupyter_client/provisioning/__pycache__/local_provisioner.cpython-313.pyc,,\njupyter_client/provisioning/__pycache__/provisioner_base.cpython-313.pyc,,\njupyter_client/provisioning/factory.py,sha256=NaSl5bbbXagyu1Kq07zlrVugmK2fikyKDkCCiNvTy_M,9651\njupyter_client/provisioning/local_provisioner.py,sha256=YJ6Z9dsDr1ZtZIsFVEzDgjWS98qzwrAhpyCqIiB0OZM,10055\njupyter_client/provisioning/provisioner_base.py,sha256=wGAru7yYqaprWwlz43BUiaFAYmr5rK_E4fwhmWyGZK8,9970\njupyter_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\njupyter_client/restarter.py,sha256=CVbXa1OlM8SPmkDvpfH9VgaavvEcBZjb8lhtTqXIpU8,5852\njupyter_client/runapp.py,sha256=cnbKsXMPSw93vftBEBpFyp9oPzpr_AZSbmJwHHhHdXM,4684\njupyter_client/session.py,sha256=CGsmRKTszgYqHwcjv_skzsgLUD6UAcJgT2FEGDAwn4g,37774\njupyter_client/ssh/__init__.py,sha256=878SVB5fbhnfdn8CIpJ5fVXxURQv6-GNwp6Zgbubi-0,30\njupyter_client/ssh/__pycache__/__init__.cpython-313.pyc,,\njupyter_client/ssh/__pycache__/forward
.cpython-313.pyc,,\njupyter_client/ssh/__pycache__/tunnel.cpython-313.pyc,,\njupyter_client/ssh/forward.py,sha256=8lYZHKp6L3YqDaAdxLyh1A8Ug_L7uf1hU-R5C1_Z1UI,3560\njupyter_client/ssh/tunnel.py,sha256=Mdh1Tp_KLnOR2Tnb1VdZ4HuIKHamX06umCx7Z20A_Mc,13795\njupyter_client/threaded.py,sha256=nzUlEXTu8OFKs8kkvOINWzEaP1g4oQ_lRhSTKEp2M3Y,11283\njupyter_client/utils.py,sha256=ylG_OrqyGalW6tFt-3GrsIOB8dObacmbMWQOiRWx9GE,3178\njupyter_client/win_interrupt.py,sha256=-8lqzBiDlbx4oz0b_Dg_OD209dSJoYz5Ehnbqhuojrc,1516\n | .venv\Lib\site-packages\jupyter_client-8.6.3.dist-info\RECORD | RECORD | Other | 6,506 | 0.7 | 0 | 0 | python-kit | 110 | 2024-07-14T06:54:19.681283 | GPL-3.0 | false | 7a1ac0bfba3ec75f2bfce686f5c9f70b |
Wheel-Version: 1.0\nGenerator: hatchling 1.25.0\nRoot-Is-Purelib: true\nTag: py3-none-any\n | .venv\Lib\site-packages\jupyter_client-8.6.3.dist-info\WHEEL | WHEEL | Other | 87 | 0.5 | 0 | 0 | vue-tools | 388 | 2023-07-27T17:02:00.668116 | BSD-3-Clause | false | 52adfa0c417902ee8f0c3d1ca2372ac3 |
BSD 3-Clause License\n\n- Copyright (c) 2001-2015, IPython Development Team\n- Copyright (c) 2015-, Jupyter Development Team\n\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n1. Redistributions of source code must retain the above copyright notice, this\n list of conditions and the following disclaimer.\n\n2. Redistributions in binary form must reproduce the above copyright notice,\n this list of conditions and the following disclaimer in the documentation\n and/or other materials provided with the distribution.\n\n3. Neither the name of the copyright holder nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n | .venv\Lib\site-packages\jupyter_client-8.6.3.dist-info\licenses\LICENSE | LICENSE | Other | 1,588 | 0.7 | 0 | 0 | awesome-app | 209 | 2024-07-09T21:17:52.057142 | MIT | false | 083556a9912a35360dae8281fb57e886 |
""" A minimal application using the ZMQ-based terminal IPython frontend.\n\nThis is not a complete console app, as subprocess will not be able to receive\ninput, there is no real readline support, among other limitations.\n"""\n\n# Copyright (c) IPython Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nfrom __future__ import print_function\n\nimport signal\nimport sys\n\nfrom traitlets import (\n Dict, Any\n)\nfrom traitlets.config import catch_config_error, boolean_flag\n\nfrom jupyter_core.application import JupyterApp, base_aliases, base_flags\nfrom jupyter_client.consoleapp import (\n JupyterConsoleApp, app_aliases, app_flags,\n )\n\nfrom jupyter_console.ptshell import ZMQTerminalInteractiveShell\nfrom jupyter_console import __version__\n\n#-----------------------------------------------------------------------------\n# Globals\n#-----------------------------------------------------------------------------\n\n_examples = """\njupyter console # start the ZMQ-based console\njupyter console --existing # connect to an existing ipython session\n"""\n\n#-----------------------------------------------------------------------------\n# Flags and Aliases\n#-----------------------------------------------------------------------------\n\n# copy flags from mixin:\nflags = dict(base_flags)\n# start with mixin frontend flags:\n# update full dict with frontend flags:\nflags.update(app_flags)\nflags.update(boolean_flag(\n 'simple-prompt', 'ZMQTerminalInteractiveShell.simple_prompt',\n "Force simple minimal prompt using `raw_input`",\n "Use a rich interactive prompt with prompt_toolkit"\n))\n\n# copy flags from mixin\naliases = dict(base_aliases)\n\naliases.update(app_aliases)\n\nfrontend_aliases = set(app_aliases.keys())\nfrontend_flags = set(app_flags.keys())\n\n\n#-----------------------------------------------------------------------------\n# Classes\n#-----------------------------------------------------------------------------\n\n\nclass 
ZMQTerminalIPythonApp(JupyterApp, JupyterConsoleApp): # type:ignore[misc]\n name = "jupyter-console"\n version = __version__\n """Start a terminal frontend to the IPython zmq kernel."""\n\n description = """\n The Jupyter terminal-based Console.\n\n This launches a Console application inside a terminal.\n\n The Console supports various extra features beyond the traditional\n single-process Terminal IPython shell, such as connecting to an\n existing ipython session, via:\n\n jupyter console --existing\n\n where the previous session could have been created by another ipython\n console, an ipython qtconsole, or by opening an ipython notebook.\n\n """\n examples = _examples\n\n classes = [ZMQTerminalInteractiveShell] + JupyterConsoleApp.classes # type:ignore[operator]\n flags = Dict(flags) # type:ignore[assignment]\n aliases = Dict(aliases) # type:ignore[assignment]\n frontend_aliases = Any(frontend_aliases)\n frontend_flags = Any(frontend_flags)\n\n subcommands = Dict()\n\n force_interact = True\n\n def parse_command_line(self, argv=None):\n super(ZMQTerminalIPythonApp, self).parse_command_line(argv)\n self.build_kernel_argv(self.extra_args)\n\n def init_shell(self):\n JupyterConsoleApp.initialize(self)\n # relay sigint to kernel\n signal.signal(signal.SIGINT, self.handle_sigint)\n self.shell = ZMQTerminalInteractiveShell.instance(parent=self,\n manager=self.kernel_manager,\n client=self.kernel_client,\n confirm_exit=self.confirm_exit,\n )\n self.shell.own_kernel = not self.existing\n\n def init_gui_pylab(self):\n # no-op, because we don't want to import matplotlib in the frontend.\n pass\n\n def handle_sigint(self, *args):\n if self.shell._executing:\n if self.kernel_manager:\n self.kernel_manager.interrupt_kernel()\n else:\n print("ERROR: Cannot interrupt kernels we didn't start.",\n file = sys.stderr)\n else:\n # raise the KeyboardInterrupt if we aren't waiting for execution,\n # so that the interact loop advances, and prompt is redrawn, etc.\n raise 
KeyboardInterrupt\n\n @catch_config_error\n def initialize(self, argv=None):\n """Do actions after construct, but before starting the app."""\n super(ZMQTerminalIPythonApp, self).initialize(argv)\n if self._dispatching:\n return\n # create the shell\n self.init_shell()\n # and draw the banner\n self.init_banner()\n\n def init_banner(self):\n """optionally display the banner"""\n self.shell.show_banner()\n\n def start(self):\n # JupyterApp.start dispatches on NoStart\n super(ZMQTerminalIPythonApp, self).start()\n self.log.debug("Starting the jupyter console mainloop...")\n self.shell.mainloop()\n\n\nmain = launch_new_instance = ZMQTerminalIPythonApp.launch_instance\n\n\nif __name__ == '__main__':\n main()\n\n | .venv\Lib\site-packages\jupyter_console\app.py | app.py | Python | 5,054 | 0.95 | 0.089744 | 0.188034 | awesome-app | 593 | 2024-01-26T11:25:56.258507 | Apache-2.0 | false | 1f31823404071596478529447930dffe |
# -*- coding: utf-8 -*-\n"""Adapt readline completer interface to make ZMQ request."""\n\n# Copyright (c) IPython Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nfrom traitlets.config import Configurable\nfrom traitlets import Float\n\nfrom jupyter_console.utils import run_sync\n\n\nclass ZMQCompleter(Configurable):\n """Client-side completion machinery.\n\n How it works: self.complete will be called multiple times, with\n state=0,1,2,... When state=0 it should compute ALL the completion matches,\n and then return them for each value of state."""\n\n timeout = Float(5.0, config=True, help='timeout before completion abort')\n \n def __init__(self, shell, client, config=None):\n super(ZMQCompleter,self).__init__(config=config)\n\n self.shell = shell\n self.client = client\n self.matches = []\n \n def complete_request(self, code, cursor_pos):\n # send completion request to kernel\n # Give the kernel up to 5s to respond\n msg_id = self.client.complete(\n code=code,\n cursor_pos=cursor_pos,\n )\n \n msg = run_sync(self.client.shell_channel.get_msg)(timeout=self.timeout)\n if msg['parent_header']['msg_id'] == msg_id:\n return msg['content']\n\n return {'matches': [], 'cursor_start': 0, 'cursor_end': 0,\n 'metadata': {}, 'status': 'ok'}\n\n | .venv\Lib\site-packages\jupyter_console\completer.py | completer.py | Python | 1,399 | 0.95 | 0.116279 | 0.166667 | python-kit | 131 | 2025-04-05T00:58:22.613656 | GPL-3.0 | false | e60e805052c14a98626cef76c9446d75 |
"""IPython terminal interface using prompt_toolkit in place of readline"""\nfrom __future__ import print_function\n\nimport asyncio\nimport base64\nimport errno\nfrom getpass import getpass\nfrom io import BytesIO\nimport os\nfrom queue import Empty\nimport signal\nimport subprocess\nimport sys\nfrom tempfile import TemporaryDirectory\nimport time\nfrom warnings import warn\n\nfrom typing import Dict as DictType, Any as AnyType\n\nfrom zmq import ZMQError\nfrom IPython.core import page\nfrom traitlets import (\n Bool,\n Integer,\n Float,\n Unicode,\n List,\n Dict,\n Enum,\n Instance,\n Any,\n)\nfrom traitlets.config import SingletonConfigurable\n\nfrom .completer import ZMQCompleter\nfrom .zmqhistory import ZMQHistoryManager\nfrom . import __version__\n\n# Discriminate version3 for asyncio\nfrom prompt_toolkit import __version__ as ptk_version\nPTK3 = ptk_version.startswith('3.')\n\nif not PTK3:\n # use_ayncio_event_loop obsolete in PKT3\n from prompt_toolkit.eventloop.defaults import use_asyncio_event_loop\n\nfrom prompt_toolkit.completion import Completer, Completion\nfrom prompt_toolkit.document import Document\nfrom prompt_toolkit.enums import DEFAULT_BUFFER, EditingMode\nfrom prompt_toolkit.filters import (\n Condition,\n has_focus,\n has_selection,\n vi_insert_mode,\n emacs_insert_mode,\n is_done,\n)\nfrom prompt_toolkit.history import InMemoryHistory\nfrom prompt_toolkit.shortcuts.prompt import PromptSession\nfrom prompt_toolkit.shortcuts import print_formatted_text, CompleteStyle\nfrom prompt_toolkit.key_binding import KeyBindings\nfrom prompt_toolkit.lexers import PygmentsLexer\nfrom prompt_toolkit.layout.processors import (\n ConditionalProcessor,\n HighlightMatchingBracketProcessor,\n)\nfrom prompt_toolkit.styles import merge_styles\nfrom prompt_toolkit.styles.pygments import (style_from_pygments_cls,\n style_from_pygments_dict)\nfrom prompt_toolkit.formatted_text import PygmentsTokens\nfrom prompt_toolkit.output import ColorDepth\nfrom 
prompt_toolkit.utils import suspend_to_background_supported\n\nfrom pygments.styles import get_style_by_name\nfrom pygments.lexers import get_lexer_by_name\nfrom pygments.util import ClassNotFound\nfrom pygments.token import Token\n\nfrom jupyter_console.utils import run_sync, ensure_async\n\n\ndef ask_yes_no(prompt, default=None, interrupt=None):\n """Asks a question and returns a boolean (y/n) answer.\n\n If default is given (one of 'y','n'), it is used if the user input is\n empty. If interrupt is given (one of 'y','n'), it is used if the user\n presses Ctrl-C. Otherwise the question is repeated until an answer is\n given.\n\n An EOF is treated as the default answer. If there is no default, an\n exception is raised to prevent infinite loops.\n\n Valid answers are: y/yes/n/no (match is not case sensitive)."""\n\n answers = {'y': True, 'n': False, 'yes': True, 'no': False}\n ans = None\n while ans not in answers.keys():\n try:\n ans = input(prompt + ' ').lower()\n if not ans: # response was an empty string\n ans = default\n except KeyboardInterrupt:\n if interrupt:\n ans = interrupt\n except EOFError:\n if default in answers.keys():\n ans = default\n print()\n else:\n raise\n\n return answers[ans]\n\n\nasync def async_input(prompt, loop=None):\n """Simple async version of input using a the default executor"""\n if loop is None:\n loop = asyncio.get_event_loop()\n\n raw = await loop.run_in_executor(None, input, prompt)\n return raw\n\n\ndef get_pygments_lexer(name):\n name = name.lower()\n if name == 'ipython2':\n from IPython.lib.lexers import IPythonLexer\n return IPythonLexer\n elif name == 'ipython3':\n from IPython.lib.lexers import IPython3Lexer\n return IPython3Lexer\n else:\n try:\n return get_lexer_by_name(name).__class__\n except ClassNotFound:\n warn("No lexer found for language %r. Treating as plain text." 
% name)\n from pygments.lexers.special import TextLexer\n return TextLexer\n\n\nclass JupyterPTCompleter(Completer):\n """Adaptor to provide kernel completions to prompt_toolkit"""\n def __init__(self, jup_completer):\n self.jup_completer = jup_completer\n\n def get_completions(self, document, complete_event):\n if not document.current_line.strip():\n return\n\n content = self.jup_completer.complete_request(\n code=document.text,\n cursor_pos=document.cursor_position\n )\n meta = content["metadata"]\n\n if "_jupyter_types_experimental" in meta:\n try:\n new_meta = {}\n for c, m in zip(\n content["matches"], meta["_jupyter_types_experimental"]\n ):\n new_meta[c] = m["type"]\n meta = new_meta\n except Exception:\n pass\n\n start_pos = content["cursor_start"] - document.cursor_position\n for m in content["matches"]:\n yield Completion(\n m,\n start_position=start_pos,\n display_meta=meta.get(m, "?"),\n )\n\n\nclass ZMQTerminalInteractiveShell(SingletonConfigurable):\n readline_use = False\n\n pt_cli = None\n\n _executing = False\n _execution_state = Unicode('')\n _pending_clearoutput = False\n _eventloop = None\n own_kernel = False # Changed by ZMQTerminalIPythonApp\n\n editing_mode = Unicode('emacs', config=True,\n help="Shortcut style to use at the prompt. 'vi' or 'emacs'.",\n )\n\n highlighting_style = Unicode('', config=True,\n help="The name of a Pygments style to use for syntax highlighting"\n )\n\n highlighting_style_overrides = Dict(config=True,\n help="Override highlighting format for specific tokens"\n )\n\n true_color = Bool(False, config=True,\n help=("Use 24bit colors instead of 256 colors in prompt highlighting. 
"\n "If your terminal supports true color, the following command "\n "should print 'TRUECOLOR' in orange: "\n "printf \"\\x1b[38;2;255;100;0mTRUECOLOR\\x1b[0m\\n\"")\n )\n\n history_load_length = Integer(1000, config=True,\n help="How many history items to load into memory"\n )\n\n banner = Unicode('Jupyter console {version}\n\n{kernel_banner}', config=True,\n help=("Text to display before the first prompt. Will be formatted with "\n "variables {version} and {kernel_banner}.")\n )\n\n kernel_timeout = Float(60, config=True,\n help="""Timeout for giving up on a kernel (in seconds).\n\n On first connect and restart, the console tests whether the\n kernel is running and responsive by sending kernel_info_requests.\n This sets the timeout in seconds for how long the kernel can take\n before being presumed dead.\n """\n )\n\n image_handler = Enum(('PIL', 'stream', 'tempfile', 'callable'),\n 'PIL', config=True, allow_none=True, help=\n """\n Handler for image type output. This is useful, for example,\n when connecting to the kernel in which pylab inline backend is\n activated. There are four handlers defined. 'PIL': Use\n Python Imaging Library to popup image; 'stream': Use an\n external program to show the image. Image will be fed into\n the STDIN of the program. You will need to configure\n `stream_image_handler`; 'tempfile': Use an external program to\n show the image. Image will be saved in a temporally file and\n the program is called with the temporally file. You will need\n to configure `tempfile_image_handler`; 'callable': You can set\n any Python callable which is called with the image data. You\n will need to configure `callable_image_handler`.\n """\n )\n\n stream_image_handler = List(config=True, help=\n """\n Command to invoke an image viewer program when you are using\n 'stream' image handler. This option is a list of string where\n the first element is the command itself and reminders are the\n options for the command. 
Raw image data is given as STDIN to\n the program.\n """\n )\n\n tempfile_image_handler = List(config=True, help=\n """\n Command to invoke an image viewer program when you are using\n 'tempfile' image handler. This option is a list of string\n where the first element is the command itself and reminders\n are the options for the command. You can use {file} and\n {format} in the string to represent the location of the\n generated image file and image format.\n """\n )\n\n callable_image_handler = Any(\n config=True,\n help="""\n Callable object called via 'callable' image handler with one\n argument, `data`, which is `msg["content"]["data"]` where\n `msg` is the message from iopub channel. For example, you can\n find base64 encoded PNG data as `data['image/png']`. If your function\n can't handle the data supplied, it should return `False` to indicate\n this.\n """\n )\n\n mime_preference = List(\n default_value=['image/png', 'image/jpeg', 'image/svg+xml'],\n config=True, help=\n """\n Preferred object representation MIME type in order. First\n matched MIME type will be used.\n """\n )\n\n use_kernel_is_complete = Bool(True, config=True,\n help="""Whether to use the kernel's is_complete message\n handling. 
If False, then the frontend will use its\n own is_complete handler.\n """\n )\n kernel_is_complete_timeout = Float(1, config=True,\n help="""Timeout (in seconds) for giving up on a kernel's is_complete\n response.\n\n If the kernel does not respond at any point within this time,\n the kernel will no longer be asked if code is complete, and the\n console will default to the built-in is_complete test.\n """\n )\n\n # This is configurable on JupyterConsoleApp; this copy is not configurable\n # to avoid a duplicate config option.\n confirm_exit = Bool(True,\n help="""Set to display confirmation dialog on exit.\n You can always use 'exit' or 'quit', to force a\n direct exit without any confirmation.\n """,\n )\n\n display_completions = Enum(\n ("column", "multicolumn", "readlinelike"),\n help=(\n "Options for displaying tab completions, 'column', 'multicolumn', and "\n "'readlinelike'. These options are for `prompt_toolkit`, see "\n "`prompt_toolkit` documentation for more information."\n ),\n default_value="multicolumn",\n ).tag(config=True)\n\n prompt_includes_vi_mode = Bool(True,\n help="Display the current vi mode (when using vi editing mode)."\n ).tag(config=True)\n\n highlight_matching_brackets = Bool(True, help="Highlight matching brackets.",).tag(\n config=True\n )\n\n manager = Instance("jupyter_client.KernelManager", allow_none=True)\n client = Instance("jupyter_client.KernelClient", allow_none=True)\n\n def _client_changed(self, name, old, new):\n self.session_id = new.session.session\n session_id = Unicode()\n\n def _banner1_default(self):\n return "Jupyter Console {version}\n".format(version=__version__)\n\n simple_prompt = Bool(False,\n help="""Use simple fallback prompt. 
Features may be limited."""\n ).tag(config=True)\n\n def __init__(self, **kwargs):\n # This is where traits with a config_key argument are updated\n # from the values on config.\n super(ZMQTerminalInteractiveShell, self).__init__(**kwargs)\n self.configurables = [self]\n\n self.init_history()\n self.init_completer()\n self.init_io()\n\n self.init_kernel_info()\n self.init_prompt_toolkit_cli()\n self.keep_running = True\n self.execution_count = 1\n\n def init_completer(self):\n """Initialize the completion machinery.\n\n This creates completion machinery that can be used by client code,\n either interactively in-process (typically triggered by the readline\n library), programmatically (such as in test suites) or out-of-process\n (typically over the network by remote frontends).\n """\n self.Completer = ZMQCompleter(self, self.client, config=self.config)\n\n def init_history(self):\n """Sets up the command history. """\n self.history_manager = ZMQHistoryManager(client=self.client)\n self.configurables.append(self.history_manager)\n\n def vi_mode(self):\n if (getattr(self, 'editing_mode', None) == 'vi'\n and self.prompt_includes_vi_mode):\n return '['+str(self.pt_cli.app.vi_state.input_mode)[3:6]+'] '\n return ''\n\n def get_prompt_tokens(self, ec=None):\n if ec is None:\n ec = self.execution_count\n return [\n (Token.Prompt, self.vi_mode()),\n (Token.Prompt, 'In ['),\n (Token.PromptNum, str(ec)),\n (Token.Prompt, ']: '),\n ]\n\n def get_continuation_tokens(self, width):\n return [\n (Token.Prompt, (" " * (width - 5)) + "...: "),\n ]\n\n def get_out_prompt_tokens(self):\n return [\n (Token.OutPrompt, 'Out['),\n (Token.OutPromptNum, str(self.execution_count)),\n (Token.OutPrompt, ']: ')\n ]\n\n def print_out_prompt(self):\n tokens = self.get_out_prompt_tokens()\n print_formatted_text(PygmentsTokens(tokens), end='',\n style = self.pt_cli.app.style)\n\n def get_remote_prompt_tokens(self):\n return [\n (Token.RemotePrompt, self.other_output_prefix),\n ]\n\n def 
print_remote_prompt(self, ec=None):\n tokens = self.get_remote_prompt_tokens() + self.get_prompt_tokens(ec=ec)\n print_formatted_text(\n PygmentsTokens(tokens), end="", style=self.pt_cli.app.style\n )\n\n @property\n def pt_complete_style(self):\n return {\n "multicolumn": CompleteStyle.MULTI_COLUMN,\n "column": CompleteStyle.COLUMN,\n "readlinelike": CompleteStyle.READLINE_LIKE,\n }[self.display_completions]\n\n kernel_info: DictType[str, AnyType] = {}\n\n def init_kernel_info(self):\n """Wait for a kernel to be ready, and store kernel info"""\n timeout = self.kernel_timeout\n tic = time.time()\n self.client.hb_channel.unpause()\n msg_id = self.client.kernel_info()\n while True:\n try:\n reply = self.client.get_shell_msg(timeout=1)\n except Empty as e:\n if (time.time() - tic) > timeout:\n raise RuntimeError("Kernel didn't respond to kernel_info_request") from e\n else:\n if reply['parent_header'].get('msg_id') == msg_id:\n self.kernel_info = reply['content']\n return\n\n def show_banner(self):\n print(self.banner.format(version=__version__,\n kernel_banner=self.kernel_info.get('banner', '')),end='',flush=True)\n\n def init_prompt_toolkit_cli(self):\n if self.simple_prompt or ('JUPYTER_CONSOLE_TEST' in os.environ):\n # Simple restricted interface for tests so we can find prompts with\n # pexpect. 
Multi-line input not supported.\n async def prompt():\n prompt = 'In [%d]: ' % self.execution_count\n raw = await async_input(prompt)\n return raw\n self.prompt_for_code = prompt\n self.print_out_prompt = \\n lambda: print('Out[%d]: ' % self.execution_count, end='')\n return\n\n kb = KeyBindings()\n insert_mode = vi_insert_mode | emacs_insert_mode\n\n @kb.add("enter", filter=(has_focus(DEFAULT_BUFFER)\n & ~has_selection\n & insert_mode\n ))\n def _(event):\n b = event.current_buffer\n d = b.document\n if not (d.on_last_line or d.cursor_position_row >= d.line_count\n - d.empty_line_count_at_the_end()):\n b.newline()\n return\n\n # Pressing enter flushes any pending display. This also ensures\n # the displayed execution_count is correct.\n self.handle_iopub()\n\n more, indent = self.check_complete(d.text)\n\n if (not more) and b.accept_handler:\n b.validate_and_handle()\n else:\n b.insert_text('\n' + indent)\n\n @kb.add("c-c", filter=has_focus(DEFAULT_BUFFER))\n def _(event):\n event.current_buffer.reset()\n\n @kb.add("c-\\", filter=has_focus(DEFAULT_BUFFER))\n def _(event):\n raise EOFError\n\n @kb.add("c-z", filter=Condition(lambda: suspend_to_background_supported()))\n def _(event):\n event.cli.suspend_to_background()\n\n @kb.add("c-o", filter=(has_focus(DEFAULT_BUFFER) & emacs_insert_mode))\n def _(event):\n event.current_buffer.insert_text("\n")\n\n # Pre-populate history from IPython's history database\n history = InMemoryHistory()\n last_cell = u""\n for _, _, cell in self.history_manager.get_tail(self.history_load_length,\n include_latest=True):\n # Ignore blank lines and consecutive duplicates\n cell = cell.rstrip()\n if cell and (cell != last_cell):\n history.append_string(cell)\n\n style_overrides = {\n Token.Prompt: '#009900',\n Token.PromptNum: '#00ff00 bold',\n Token.OutPrompt: '#ff2200',\n Token.OutPromptNum: '#ff0000 bold',\n Token.RemotePrompt: '#999900',\n }\n if self.highlighting_style:\n style_cls = get_style_by_name(self.highlighting_style)\n 
else:\n style_cls = get_style_by_name('default')\n # The default theme needs to be visible on both a dark background\n # and a light background, because we can't tell what the terminal\n # looks like. These tweaks to the default theme help with that.\n style_overrides.update({\n Token.Number: '#007700',\n Token.Operator: 'noinherit',\n Token.String: '#BB6622',\n Token.Name.Function: '#2080D0',\n Token.Name.Class: 'bold #2080D0',\n Token.Name.Namespace: 'bold #2080D0',\n })\n style_overrides.update(self.highlighting_style_overrides)\n style = merge_styles([\n style_from_pygments_cls(style_cls),\n style_from_pygments_dict(style_overrides),\n ])\n\n editing_mode = getattr(EditingMode, self.editing_mode.upper())\n langinfo = self.kernel_info.get('language_info', {})\n lexer = langinfo.get('pygments_lexer', langinfo.get('name', 'text'))\n\n # If enabled in the settings, highlight matching brackets\n # when the DEFAULT_BUFFER has the focus\n input_processors = [ConditionalProcessor(\n processor=HighlightMatchingBracketProcessor(chars='[](){}'),\n filter=has_focus(DEFAULT_BUFFER) & ~is_done &\n Condition(lambda: self.highlight_matching_brackets))\n ]\n\n # Tell prompt_toolkit to use the asyncio event loop.\n # Obsolete in prompt_toolkit.v3\n if not PTK3:\n use_asyncio_event_loop()\n\n self.pt_cli = PromptSession(\n message=(lambda: PygmentsTokens(self.get_prompt_tokens())),\n multiline=True,\n complete_style=self.pt_complete_style,\n editing_mode=editing_mode,\n lexer=PygmentsLexer(get_pygments_lexer(lexer)),\n prompt_continuation=(\n lambda width, lineno, is_soft_wrap: PygmentsTokens(\n self.get_continuation_tokens(width)\n )\n ),\n key_bindings=kb,\n history=history,\n completer=JupyterPTCompleter(self.Completer),\n enable_history_search=True,\n style=style,\n input_processors=input_processors,\n color_depth=(ColorDepth.TRUE_COLOR if self.true_color else None),\n )\n\n async def prompt_for_code(self):\n if self.next_input:\n default = self.next_input\n self.next_input = 
None\n else:\n default = ''\n\n if PTK3:\n text = await self.pt_cli.prompt_async(default=default)\n else:\n text = await self.pt_cli.prompt(default=default, async_=True)\n\n return text\n\n def init_io(self):\n if sys.platform not in {'win32', 'cli'}:\n return\n\n import colorama\n colorama.init()\n\n def check_complete(self, code):\n if self.use_kernel_is_complete:\n msg_id = self.client.is_complete(code)\n try:\n return self.handle_is_complete_reply(msg_id,\n timeout=self.kernel_is_complete_timeout)\n except SyntaxError:\n return False, ""\n else:\n lines = code.splitlines()\n if len(lines):\n more = (lines[-1] != "")\n return more, ""\n else:\n return False, ""\n\n def ask_exit(self):\n self.keep_running = False\n\n # This is set from payloads in handle_execute_reply\n next_input = None\n\n def pre_prompt(self):\n if self.next_input:\n # We can't set the buffer here, because it will be reset just after\n # this. Adding a callable to pre_run_callables does what we need\n # after the buffer is reset.\n s = self.next_input\n\n def set_doc():\n self.pt_cli.app.buffer.document = Document(s)\n if hasattr(self.pt_cli, 'pre_run_callables'):\n self.pt_cli.app.pre_run_callables.append(set_doc)\n else:\n # Older version of prompt_toolkit; it's OK to set the document\n # directly here.\n set_doc()\n self.next_input = None\n\n async def interact(self, loop=None, display_banner=None):\n while self.keep_running:\n print('\n', end='')\n\n try:\n code = await self.prompt_for_code()\n except EOFError:\n if (not self.confirm_exit) or \\n ask_yes_no('Do you really want to exit ([y]/n)?', 'y', 'n'):\n self.ask_exit()\n\n else:\n if code:\n self.run_cell(code, store_history=True)\n\n async def _main_task(self):\n loop = asyncio.get_running_loop()\n tasks = [asyncio.create_task(self.interact(loop=loop))]\n\n if self.include_other_output:\n # only poll the iopub channel asynchronously if we\n # wish to include external content\n 
tasks.append(asyncio.create_task(self.handle_external_iopub(loop=loop)))\n\n _, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)\n\n for task in pending:\n task.cancel()\n try:\n await asyncio.gather(*pending)\n except asyncio.CancelledError:\n pass\n\n def mainloop(self):\n self.keepkernel = not self.own_kernel\n # An extra layer of protection in case someone mashing Ctrl-C breaks\n # out of our internal code.\n while True:\n try:\n asyncio.run(self._main_task())\n break\n except KeyboardInterrupt:\n print("\nKeyboardInterrupt escaped interact()\n")\n\n if self._eventloop:\n self._eventloop.close()\n if self.keepkernel and not self.own_kernel:\n print('keeping kernel alive')\n elif self.keepkernel and self.own_kernel:\n print("owning kernel, cannot keep it alive")\n self.client.shutdown()\n else:\n print("Shutting down kernel")\n self.client.shutdown()\n\n def run_cell(self, cell, store_history=True):\n """Run a complete IPython cell.\n\n Parameters\n ----------\n cell : str\n The code (including IPython code such as %magic functions) to run.\n store_history : bool\n If True, the raw and translated cell will be stored in IPython's\n history. 
For user code calling back into IPython's machinery, this\n should be set to False.\n """\n if (not cell) or cell.isspace():\n # pressing enter flushes any pending display\n self.handle_iopub()\n return\n\n # flush stale replies, which could have been ignored, due to missed heartbeats\n while run_sync(self.client.shell_channel.msg_ready)():\n run_sync(self.client.shell_channel.get_msg)()\n # execute takes 'hidden', which is the inverse of store_hist\n msg_id = self.client.execute(cell, not store_history)\n\n # first thing is wait for any side effects (output, stdin, etc.)\n self._executing = True\n self._execution_state = "busy"\n while self._execution_state != 'idle' and self.client.is_alive():\n try:\n self.handle_input_request(msg_id, timeout=0.05)\n except Empty:\n # display intermediate print statements, etc.\n self.handle_iopub(msg_id)\n except ZMQError as e:\n # Carry on if polling was interrupted by a signal\n if e.errno != errno.EINTR:\n raise\n\n # after all of that is done, wait for the execute reply\n while self.client.is_alive():\n try:\n self.handle_execute_reply(msg_id, timeout=0.05)\n except Empty:\n pass\n else:\n break\n self._executing = False\n\n #-----------------\n # message handlers\n #-----------------\n\n def handle_execute_reply(self, msg_id, timeout=None):\n kwargs = {"timeout": timeout}\n msg = run_sync(self.client.shell_channel.get_msg)(**kwargs)\n if msg["parent_header"].get("msg_id", None) == msg_id:\n\n self.handle_iopub(msg_id)\n\n content = msg["content"]\n status = content['status']\n\n if status == "aborted":\n sys.stdout.write("Aborted\n")\n return\n elif status == 'ok':\n # handle payloads\n for item in content.get("payload", []):\n source = item['source']\n if source == 'page':\n page.page(item['data']['text/plain'])\n elif source == 'set_next_input':\n self.next_input = item['text']\n elif source == 'ask_exit':\n self.keepkernel = item.get('keepkernel', False)\n self.ask_exit()\n\n elif status == 'error':\n pass\n\n 
self.execution_count = int(content["execution_count"] + 1)\n\n def handle_is_complete_reply(self, msg_id, timeout=None):\n """\n Wait for a repsonse from the kernel, and return two values:\n more? - (boolean) should the frontend ask for more input\n indent - an indent string to prefix the input\n Overloaded methods may want to examine the comeplete source. Its is\n in the self._source_lines_buffered list.\n """\n ## Get the is_complete response:\n msg = None\n try:\n kwargs = {"timeout": timeout}\n msg = run_sync(self.client.shell_channel.get_msg)(**kwargs)\n except Empty:\n warn('The kernel did not respond to an is_complete_request. '\n 'Setting `use_kernel_is_complete` to False.')\n self.use_kernel_is_complete = False\n return False, ""\n ## Handle response:\n if msg["parent_header"].get("msg_id", None) != msg_id:\n warn('The kernel did not respond properly to an is_complete_request: %s.' % str(msg))\n return False, ""\n else:\n status = msg["content"].get("status", None)\n indent = msg["content"].get("indent", "")\n ## Return more? and indent string\n if status == "complete":\n return False, indent\n elif status == "incomplete":\n return True, indent\n elif status == "invalid":\n raise SyntaxError()\n elif status == "unknown":\n return False, indent\n else:\n warn('The kernel sent an invalid is_complete_reply status: "%s".' 
% status)\n return False, indent\n\n include_other_output = Bool(False, config=True,\n help="""Whether to include output from clients\n other than this one sharing the same kernel.\n """\n )\n other_output_prefix = Unicode("Remote ", config=True,\n help="""Prefix to add to outputs coming from clients other than this one.\n\n Only relevant if include_other_output is True.\n """\n )\n\n def from_here(self, msg):\n """Return whether a message is from this session"""\n return msg['parent_header'].get("session", self.session_id) == self.session_id\n\n def include_output(self, msg):\n """Return whether we should include a given output message"""\n from_here = self.from_here(msg)\n if msg['msg_type'] == 'execute_input':\n # only echo inputs not from here\n return self.include_other_output and not from_here\n\n if self.include_other_output:\n return True\n else:\n return from_here\n\n async def handle_external_iopub(self, loop=None):\n while self.keep_running:\n # we need to check for keep_running from time to time\n poll_result = await ensure_async(self.client.iopub_channel.socket.poll(0))\n if poll_result:\n self.handle_iopub()\n await asyncio.sleep(0.5)\n\n def handle_iopub(self, msg_id=''):\n """Process messages on the IOPub channel\n\n This method consumes and processes messages on the IOPub channel,\n such as stdout, stderr, execute_result and status.\n\n It only displays output that is caused by this session.\n """\n while run_sync(self.client.iopub_channel.msg_ready)():\n sub_msg = run_sync(self.client.iopub_channel.get_msg)()\n msg_type = sub_msg['header']['msg_type']\n\n # Update execution_count in case it changed in another session\n if msg_type == "execute_input":\n self.execution_count = int(sub_msg["content"]["execution_count"]) + 1\n\n if self.include_output(sub_msg):\n if msg_type == 'status':\n self._execution_state = sub_msg["content"]["execution_state"]\n\n elif msg_type == 'stream':\n if sub_msg["content"]["name"] == "stdout":\n if 
self._pending_clearoutput:\n print("\r", end="")\n self._pending_clearoutput = False\n print(sub_msg["content"]["text"], end="")\n sys.stdout.flush()\n elif sub_msg["content"]["name"] == "stderr":\n if self._pending_clearoutput:\n print("\r", file=sys.stderr, end="")\n self._pending_clearoutput = False\n print(sub_msg["content"]["text"], file=sys.stderr, end="")\n sys.stderr.flush()\n\n elif msg_type == 'execute_result':\n if self._pending_clearoutput:\n print("\r", end="")\n self._pending_clearoutput = False\n self.execution_count = int(sub_msg["content"]["execution_count"])\n if not self.from_here(sub_msg):\n sys.stdout.write(self.other_output_prefix)\n format_dict = sub_msg["content"]["data"]\n self.handle_rich_data(format_dict)\n\n if 'text/plain' not in format_dict:\n continue\n\n # prompt_toolkit writes the prompt at a slightly lower level,\n # so flush streams first to ensure correct ordering.\n sys.stdout.flush()\n sys.stderr.flush()\n self.print_out_prompt()\n text_repr = format_dict['text/plain']\n if '\n' in text_repr:\n # For multi-line results, start a new line after prompt\n print()\n print(text_repr)\n\n # Remote: add new prompt\n if not self.from_here(sub_msg):\n sys.stdout.write('\n')\n sys.stdout.flush()\n self.print_remote_prompt()\n\n elif msg_type == 'display_data':\n data = sub_msg["content"]["data"]\n handled = self.handle_rich_data(data)\n if not handled:\n if not self.from_here(sub_msg):\n sys.stdout.write(self.other_output_prefix)\n # if it was an image, we handled it by now\n if 'text/plain' in data:\n print(data['text/plain'])\n\n # If execute input: print it\n elif msg_type == 'execute_input':\n content = sub_msg['content']\n ec = content.get('execution_count', self.execution_count - 1)\n\n # New line\n sys.stdout.write('\n')\n sys.stdout.flush()\n\n # With `Remote In [3]: `\n self.print_remote_prompt(ec=ec)\n\n # And the code\n sys.stdout.write(content['code'] + '\n')\n\n elif msg_type == 'clear_output':\n if 
sub_msg["content"]["wait"]:\n self._pending_clearoutput = True\n else:\n print("\r", end="")\n\n elif msg_type == 'error':\n for frame in sub_msg["content"]["traceback"]:\n print(frame, file=sys.stderr)\n\n _imagemime = {\n 'image/png': 'png',\n 'image/jpeg': 'jpeg',\n 'image/svg+xml': 'svg',\n }\n\n def handle_rich_data(self, data):\n for mime in self.mime_preference:\n if mime in data and mime in self._imagemime:\n if self.handle_image(data, mime):\n return True\n return False\n\n def handle_image(self, data, mime):\n handler = getattr(\n self, 'handle_image_{0}'.format(self.image_handler), None)\n if handler:\n return handler(data, mime)\n\n def handle_image_PIL(self, data, mime):\n if mime not in ('image/png', 'image/jpeg'):\n return False\n try:\n from PIL import Image, ImageShow\n except ImportError:\n return False\n raw = base64.decodebytes(data[mime].encode('ascii'))\n img = Image.open(BytesIO(raw))\n return ImageShow.show(img)\n\n def handle_image_stream(self, data, mime):\n raw = base64.decodebytes(data[mime].encode('ascii'))\n imageformat = self._imagemime[mime]\n fmt = dict(format=imageformat)\n args = [s.format(**fmt) for s in self.stream_image_handler]\n with subprocess.Popen(args, stdin=subprocess.PIPE,\n stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) as proc:\n proc.communicate(raw)\n return (proc.returncode == 0)\n\n def handle_image_tempfile(self, data, mime):\n raw = base64.decodebytes(data[mime].encode('ascii'))\n imageformat = self._imagemime[mime]\n filename = 'tmp.{0}'.format(imageformat)\n with TemporaryDirectory() as tempdir:\n fullpath = os.path.join(tempdir, filename)\n with open(fullpath, 'wb') as f:\n f.write(raw)\n fmt = dict(file=fullpath, format=imageformat)\n args = [s.format(**fmt) for s in self.tempfile_image_handler]\n rc = subprocess.call(args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)\n return (rc == 0)\n\n def handle_image_callable(self, data, mime):\n res = self.callable_image_handler(data)\n if res is not 
False:\n # If handler func returns e.g. None, assume it has handled the data.\n res = True\n return res\n\n def handle_input_request(self, msg_id, timeout=0.1):\n """ Method to capture raw_input\n """\n req = run_sync(self.client.stdin_channel.get_msg)(timeout=timeout)\n # in case any iopub came while we were waiting:\n self.handle_iopub(msg_id)\n if msg_id == req["parent_header"].get("msg_id"):\n # wrap SIGINT handler\n real_handler = signal.getsignal(signal.SIGINT)\n\n def double_int(sig, frame):\n # call real handler (forwards sigint to kernel),\n # then raise local interrupt, stopping local raw_input\n real_handler(sig, frame)\n raise KeyboardInterrupt\n signal.signal(signal.SIGINT, double_int)\n content = req['content']\n read = getpass if content.get('password', False) else input\n try:\n raw_data = read(content["prompt"])\n except EOFError:\n # turn EOFError into EOF character\n raw_data = '\x04'\n except KeyboardInterrupt:\n sys.stdout.write('\n')\n return\n finally:\n # restore SIGINT handler\n signal.signal(signal.SIGINT, real_handler)\n\n # only send stdin reply if there *was not* another request\n # or execution finished while we were reading.\n if not (run_sync(self.client.stdin_channel.msg_ready)() or\n run_sync(self.client.shell_channel.msg_ready)()):\n self.client.input(raw_data)\n | .venv\Lib\site-packages\jupyter_console\ptshell.py | ptshell.py | Python | 38,655 | 0.95 | 0.173077 | 0.072072 | vue-tools | 977 | 2025-05-08T20:57:50.166611 | MIT | false | 3d01154085ca9924ae62832782f02f9b |
import inspect\nimport typing as t\nfrom jupyter_core.utils import run_sync as _run_sync, ensure_async # noqa\n\n\nT = t.TypeVar("T")\n\n\ndef run_sync(coro: t.Callable[..., t.Union[T, t.Awaitable[T]]]) -> t.Callable[..., T]:\n """Wraps coroutine in a function that blocks until it has executed.\n\n Parameters\n ----------\n coro : coroutine-function\n The coroutine-function to be executed.\n\n Returns\n -------\n result :\n Whatever the coroutine-function returns.\n """\n if not inspect.iscoroutinefunction(coro):\n return t.cast(t.Callable[..., T], coro)\n return _run_sync(coro)\n\n | .venv\Lib\site-packages\jupyter_console\utils.py | utils.py | Python | 616 | 0.95 | 0.24 | 0 | python-kit | 539 | 2025-01-07T17:42:54.125293 | BSD-3-Clause | false | 76a5b25ce1248035d415a949d0dd5a42 |
""" ZMQ Kernel History accessor and manager. """\n# -----------------------------------------------------------------------------\n# Copyright (C) 2010-2011 The IPython Development Team.\n#\n# Distributed under the terms of the BSD License.\n#\n# The full license is in the file LICENSE, distributed with this software.\n# -----------------------------------------------------------------------------\n\n# -----------------------------------------------------------------------------\n# Imports\n# -----------------------------------------------------------------------------\n\nfrom IPython.core.history import HistoryAccessorBase\nfrom traitlets import Dict, List\n\nfrom queue import Empty # Py 3\n\n\nclass ZMQHistoryManager(HistoryAccessorBase):\n """History accessor and manager for ZMQ-based kernels"""\n input_hist_parsed = List([""])\n output_hist = Dict()\n dir_hist = List()\n output_hist_reprs = Dict()\n\n def __init__(self, client):\n """\n Class to load the command-line history from a ZMQ-based kernel,\n and access the history.\n\n Parameters\n ----------\n\n client: `IPython.kernel.KernelClient`\n The kernel client in order to request the history.\n """\n self.client = client\n\n def _load_history(self, raw=True, output=False, hist_access_type='range',\n **kwargs):\n """\n Load the history over ZMQ from the kernel. 
Wraps the history\n messaging with loop to wait to get history results.\n """\n history = []\n if hasattr(self.client, "history"):\n # In tests, KernelClient may not have a history method\n msg_id = self.client.history(raw=raw, output=output,\n hist_access_type=hist_access_type,\n **kwargs)\n while True:\n try:\n reply = self.client.get_shell_msg(timeout=1)\n except Empty:\n break\n else:\n if reply['parent_header'].get('msg_id') == msg_id:\n history = reply['content'].get('history', [])\n break\n return history\n\n def get_tail(self, n=10, raw=True, output=False, include_latest=False):\n return self._load_history(hist_access_type='tail', n=n, raw=raw, \n output=output)\n\n def search(self, pattern="*", raw=True, search_raw=True,\n output=False, n=None, unique=False):\n return self._load_history(hist_access_type='search', pattern=pattern, \n raw=raw, search_raw=search_raw, \n output=output, n=n, unique=unique)\n\n def get_range(self, session, start=1, stop=None, raw=True, output=False):\n return self._load_history(hist_access_type='range', raw=raw, \n output=output, start=start, stop=stop,\n session=session)\n\n def get_range_by_str(self, rangestr, raw=True, output=False):\n return self._load_history(hist_access_type='range', raw=raw, \n output=output, rangestr=rangestr)\n\n def end_session(self):\n """\n Nothing to do for ZMQ-based histories.\n """\n pass\n\n def reset(self, new_session=True):\n """\n Nothing to do for ZMQ-based histories.\n """\n pass\n | .venv\Lib\site-packages\jupyter_console\zmqhistory.py | zmqhistory.py | Python | 3,461 | 0.95 | 0.173913 | 0.168831 | node-utils | 808 | 2025-06-05T21:45:45.294610 | BSD-3-Clause | false | f2b81a92bff7f6b4524216a5c7265b28 |
import re\nfrom typing import List, Union\n\n__version__ = "6.6.3"\n\n# Build up version_info tuple for backwards compatibility\npattern = r'(?P<major>\d+).(?P<minor>\d+).(?P<patch>\d+)(?P<rest>.*)'\nmatch = re.match(pattern, __version__)\nif match:\n parts: List[Union[int, str]] = [int(match[part]) for part in ['major', 'minor', 'patch']]\n if match['rest']:\n parts.append(match['rest'])\nelse:\n parts = []\nversion_info = tuple(parts)\n | .venv\Lib\site-packages\jupyter_console\_version.py | _version.py | Python | 443 | 0.95 | 0.266667 | 0.076923 | react-lib | 749 | 2024-11-22T20:21:50.087235 | MIT | false | b20e6d42e44f900c3d3f8fb22a3d7dda |
"""Jupyter terminal console"""\n\nfrom ._version import version_info, __version__ # noqa\n | .venv\Lib\site-packages\jupyter_console\__init__.py | __init__.py | Python | 88 | 0.75 | 0 | 0 | python-kit | 781 | 2025-05-18T13:56:35.974638 | Apache-2.0 | false | d0283ddfb6f5972caa9a69286d05a281 |
from jupyter_console import app\n\nif __name__ == '__main__':\n app.launch_new_instance()\n | .venv\Lib\site-packages\jupyter_console\__main__.py | __main__.py | Python | 90 | 0.65 | 0.25 | 0 | node-utils | 734 | 2024-09-17T09:42:52.385859 | BSD-3-Clause | false | edcd4750feae4a55581caaf9878b9a93 |
import pytest\n\n\n@pytest.fixture(autouse=True)\ndef env_setup(monkeypatch):\n monkeypatch.setenv("JUPYTER_CONSOLE_TEST", "1")\n | .venv\Lib\site-packages\jupyter_console\tests\conftest.py | conftest.py | Python | 126 | 0.85 | 0.166667 | 0 | python-kit | 587 | 2025-05-09T03:48:38.054605 | MIT | true | c1ed42528824410b23533dcd07b1ab94 |
"""Tests for two-process terminal frontend"""\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nimport os\nimport shutil\nimport sys\nimport tempfile\nfrom subprocess import check_output\n\nfrom flaky import flaky\nimport pytest\n\nfrom traitlets.tests.utils import check_help_all_output\n\n\nshould_skip = sys.platform == "win32" or sys.version_info < (3,8) or sys.version_info[:2] == (3, 10) # noqa\n\n\n@flaky\n@pytest.mark.skipif(should_skip, reason="not supported")\ndef test_console_starts():\n """test that `jupyter console` starts a terminal"""\n p, pexpect, t = start_console()\n p.sendline("5")\n p.expect([r"Out\[\d+\]: 5", pexpect.EOF], timeout=t)\n p.expect([r"In \[\d+\]", pexpect.EOF], timeout=t)\n stop_console(p, pexpect, t)\n\ndef test_help_output():\n """jupyter console --help-all works"""\n check_help_all_output('jupyter_console')\n\n\n@flaky\n@pytest.mark.skipif(should_skip, reason="not supported")\ndef test_display_text():\n "Ensure display protocol plain/text key is supported"\n # equivalent of:\n #\n # x = %lsmagic\n # from IPython.display import display; display(x);\n p, pexpect, t = start_console()\n p.sendline('x = %lsmagic')\n p.expect(r'In \[\d+\]', timeout=t)\n p.sendline('from IPython.display import display; display(x);')\n p.expect(r'Available line magics:', timeout=t)\n p.expect(r'In \[\d+\]', timeout=t)\n stop_console(p, pexpect, t)\n\ndef stop_console(p, pexpect, t):\n "Stop a running `jupyter console` running via pexpect"\n # send ctrl-D;ctrl-D to exit\n p.sendeof()\n p.sendeof()\n p.expect([pexpect.EOF, pexpect.TIMEOUT], timeout=t)\n if p.isalive():\n p.terminate()\n\n\ndef start_console():\n "Start `jupyter console` using pexpect"\n import pexpect\n \n args = ['-m', 'jupyter_console', '--colors=NoColor']\n cmd = sys.executable\n env = os.environ.copy()\n env["JUPYTER_CONSOLE_TEST"] = "1"\n env["PROMPT_TOOLKIT_NO_CPR"] = "1"\n\n try:\n p = pexpect.spawn(cmd, args=args, env=env)\n 
except IOError:\n pytest.skip("Couldn't find command %s" % cmd)\n \n # timeout after two minutes\n t = 120\n p.expect(r"In \[\d+\]", timeout=t)\n return p, pexpect, t\n\n\ndef test_multiprocessing():\n p, pexpect, t = start_console()\n p.sendline('')\n\n\ndef test_generate_config():\n """jupyter console --generate-config works"""\n td = tempfile.mkdtemp()\n try:\n check_output([sys.executable, '-m', 'jupyter_console', '--generate-config'],\n env={'JUPYTER_CONFIG_DIR': td},\n )\n assert os.path.isfile(os.path.join(td, 'jupyter_console_config.py'))\n finally:\n shutil.rmtree(td)\n | .venv\Lib\site-packages\jupyter_console\tests\test_console.py | test_console.py | Python | 2,692 | 0.95 | 0.113402 | 0.105263 | node-utils | 199 | 2025-05-15T00:02:34.862859 | MIT | true | 2e7c78a7c1e423df4a9470d93d6797a1 |
# Copyright (c) IPython Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nimport base64\nimport os\nimport sys\nfrom tempfile import TemporaryDirectory\nimport unittest\nfrom unittest.mock import patch\n\nimport pytest\n\nfrom jupyter_console.ptshell import ZMQTerminalInteractiveShell\n\n\nSCRIPT_PATH = os.path.join(\n os.path.abspath(os.path.dirname(__file__)), 'writetofile.py')\n\nclass NonCommunicatingShell(ZMQTerminalInteractiveShell):\n """A testing shell class that doesn't attempt to communicate with the kernel"""\n def init_kernel_info(self):\n pass\n\n\nclass ZMQTerminalInteractiveShellTestCase(unittest.TestCase):\n\n def setUp(self):\n self.shell = NonCommunicatingShell()\n self.raw = b'dummy data'\n self.mime = 'image/png'\n self.data = {self.mime: base64.encodebytes(self.raw).decode('ascii')}\n\n def test_call_pil_by_default(self):\n pil_called_with = []\n\n def pil_called(data, mime):\n pil_called_with.append(data)\n\n def raise_if_called(*args, **kwds):\n assert False\n\n shell = self.shell\n shell.handle_image_PIL = pil_called\n shell.handle_image_stream = raise_if_called\n shell.handle_image_tempfile = raise_if_called\n shell.handle_image_callable = raise_if_called\n\n shell.handle_image(None, None) # arguments are dummy\n assert len(pil_called_with) == 1\n\n def test_handle_image_PIL(self):\n pytest.importorskip('PIL')\n from PIL import Image, ImageShow\n\n open_called_with = []\n show_called_with = []\n\n def fake_open(arg):\n open_called_with.append(arg)\n\n def fake_show(img):\n show_called_with.append(img)\n\n with patch.object(Image, 'open', fake_open), \\n patch.object(ImageShow, 'show', fake_show):\n self.shell.handle_image_PIL(self.data, self.mime)\n\n self.assertEqual(len(open_called_with), 1)\n self.assertEqual(len(show_called_with), 1)\n self.assertEqual(open_called_with[0].getvalue(), self.raw)\n\n def check_handler_with_file(self, inpath, handler):\n shell = self.shell\n configname = 
'{0}_image_handler'.format(handler)\n funcname = 'handle_image_{0}'.format(handler)\n\n assert hasattr(shell, configname)\n assert hasattr(shell, funcname)\n\n with TemporaryDirectory() as tmpdir:\n outpath = os.path.join(tmpdir, 'data')\n cmd = [sys.executable, SCRIPT_PATH, inpath, outpath]\n setattr(shell, configname, cmd)\n getattr(shell, funcname)(self.data, self.mime)\n # cmd is called and file is closed. So it's safe to open now.\n with open(outpath, 'rb') as file:\n transferred = file.read()\n\n self.assertEqual(transferred, self.raw)\n\n def test_handle_image_stream(self):\n self.check_handler_with_file('-', 'stream')\n\n def test_handle_image_tempfile(self):\n self.check_handler_with_file('{file}', 'tempfile')\n\n def test_handle_image_callable(self):\n called_with = []\n self.shell.callable_image_handler = called_with.append\n self.shell.handle_image_callable(self.data, self.mime)\n self.assertEqual(len(called_with), 1)\n assert called_with[0] is self.data\n | .venv\Lib\site-packages\jupyter_console\tests\test_image_handler.py | test_image_handler.py | Python | 3,310 | 0.95 | 0.147059 | 0.04 | react-lib | 129 | 2023-08-22T15:02:20.482533 | BSD-3-Clause | true | 4902a46c4327aa56f308f17a820e2502 |
#-----------------------------------------------------------------------------\n# Copyright (C) 2012 The IPython Development Team\n#\n# Distributed under the terms of the BSD License. The full license is in\n# the file LICENSE, distributed as part of this software.\n#-----------------------------------------------------------------------------\n\n"""\nCopy data from input file to output file for testing.\n\nCommand line usage:\n\n python writetofile.py INPUT OUTPUT\n\nBinary data from INPUT file is copied to OUTPUT file.\nIf INPUT is '-', stdin is used.\n\n"""\n\nif __name__ == '__main__':\n import sys\n (inpath, outpath) = sys.argv[1:]\n\n if inpath == '-':\n infile = sys.stdin.buffer\n else:\n infile = open(inpath, 'rb')\n\n open(outpath, 'w+b').write(infile.read())\n | .venv\Lib\site-packages\jupyter_console\tests\writetofile.py | writetofile.py | Python | 783 | 0.95 | 0.103448 | 0.285714 | python-kit | 331 | 2024-07-30T10:55:41.051464 | Apache-2.0 | true | e1e552492dc961dadec67800c2a0fab4 |
\n\n | .venv\Lib\site-packages\jupyter_console\tests\__pycache__\conftest.cpython-313.pyc | conftest.cpython-313.pyc | Other | 495 | 0.7 | 0 | 0 | vue-tools | 537 | 2024-04-01T05:42:58.840411 | BSD-3-Clause | true | 99618d00bfd90a179cfe6cf9ea0f0827 |
\n\n | .venv\Lib\site-packages\jupyter_console\tests\__pycache__\test_console.cpython-313.pyc | test_console.cpython-313.pyc | Other | 4,633 | 0.95 | 0.021739 | 0.02381 | python-kit | 619 | 2024-03-07T13:38:46.027309 | Apache-2.0 | true | cd0f39fc637698d0fe6e087f25a8af80 |
\n\n | .venv\Lib\site-packages\jupyter_console\tests\__pycache__\test_image_handler.cpython-313.pyc | test_image_handler.cpython-313.pyc | Other | 6,996 | 0.95 | 0.015152 | 0 | node-utils | 703 | 2025-04-22T15:59:12.123454 | MIT | true | 1dfd3927049bf719ecbcdfaf6a61ca0f |
\n\n | .venv\Lib\site-packages\jupyter_console\tests\__pycache__\writetofile.cpython-313.pyc | writetofile.cpython-313.pyc | Other | 841 | 0.8 | 0.071429 | 0 | awesome-app | 989 | 2024-03-02T22:49:23.730010 | MIT | true | 11e04f9fdc7571f917b05b8c1355574e |
\n\n | .venv\Lib\site-packages\jupyter_console\tests\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 196 | 0.7 | 0 | 0 | react-lib | 954 | 2024-11-27T05:37:02.245349 | MIT | true | 165360974f68a598c4b971ec8d24d9b4 |
\n\n | .venv\Lib\site-packages\jupyter_console\__pycache__\app.cpython-313.pyc | app.cpython-313.pyc | Other | 5,925 | 0.95 | 0 | 0 | node-utils | 32 | 2025-02-22T00:46:11.731992 | BSD-3-Clause | false | 165f351b49069e7d9e9534c6bdce0442 |
\n\n | .venv\Lib\site-packages\jupyter_console\__pycache__\completer.cpython-313.pyc | completer.cpython-313.pyc | Other | 2,013 | 0.7 | 0.038462 | 0 | vue-tools | 784 | 2025-06-16T08:42:52.328045 | GPL-3.0 | false | 8beb4c382abacc14e5a339f18b4a11b3 |
\n\n | .venv\Lib\site-packages\jupyter_console\__pycache__\ptshell.cpython-313.pyc | ptshell.cpython-313.pyc | Other | 49,273 | 0.95 | 0.046256 | 0.009592 | node-utils | 877 | 2023-09-20T14:21:26.885782 | BSD-3-Clause | false | e7fb7c19a89bc3023d0fe436290e46df |
\n\n | .venv\Lib\site-packages\jupyter_console\__pycache__\utils.cpython-313.pyc | utils.cpython-313.pyc | Other | 1,217 | 0.95 | 0.25 | 0 | awesome-app | 961 | 2024-08-31T15:05:41.317316 | BSD-3-Clause | false | c412e5d620a08aa5ce0fa61c806d2953 |
\n\n | .venv\Lib\site-packages\jupyter_console\__pycache__\zmqhistory.cpython-313.pyc | zmqhistory.cpython-313.pyc | Other | 3,678 | 0.8 | 0.045455 | 0 | react-lib | 160 | 2024-03-27T17:23:32.807296 | GPL-3.0 | false | 6430676ca0567743836a9b8321a9ee57 |
\n\n | .venv\Lib\site-packages\jupyter_console\__pycache__\_version.cpython-313.pyc | _version.cpython-313.pyc | Other | 823 | 0.8 | 0 | 0 | vue-tools | 484 | 2024-04-11T14:02:17.351708 | GPL-3.0 | false | 34e9174e8ea92df9d65f47b82a472d5a |
\n\n | .venv\Lib\site-packages\jupyter_console\__pycache__\__init__.cpython-313.pyc | __init__.cpython-313.pyc | Other | 305 | 0.7 | 0 | 0 | python-kit | 624 | 2023-10-11T06:55:55.239637 | BSD-3-Clause | false | 5a1204535348ad0b97a2724ca92cd74f |
\n\n | .venv\Lib\site-packages\jupyter_console\__pycache__\__main__.cpython-313.pyc | __main__.cpython-313.pyc | Other | 350 | 0.7 | 0 | 0 | vue-tools | 904 | 2023-10-21T15:33:17.635246 | MIT | false | 02f8dd998ec0f98546baf9b20ac7eb3f |
[console_scripts]\njupyter-console = jupyter_console.app:main\n | .venv\Lib\site-packages\jupyter_console-6.6.3.dist-info\entry_points.txt | entry_points.txt | Other | 61 | 0.5 | 0 | 0 | node-utils | 103 | 2024-01-04T13:25:50.081918 | Apache-2.0 | false | 2cde917dc2e9a531a7cf5602ab04bbc2 |
pip\n | .venv\Lib\site-packages\jupyter_console-6.6.3.dist-info\INSTALLER | INSTALLER | Other | 4 | 0.5 | 0 | 0 | react-lib | 308 | 2024-09-05T01:35:02.086118 | GPL-3.0 | false | 365c9bfeb7d89244f2ce01c1de44cb85 |
Metadata-Version: 2.1\nName: jupyter-console\nVersion: 6.6.3\nSummary: Jupyter terminal console\nProject-URL: Homepage, https://jupyter.org\nAuthor-email: Jupyter Development Team <jupyter@googlegroups.com>\nLicense: BSD 3-Clause License\n \n - Copyright (c) 2001-2015, IPython Development Team\n - Copyright (c) 2015-, Jupyter Development Team\n \n All rights reserved.\n \n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are met:\n \n 1. Redistributions of source code must retain the above copyright notice, this\n list of conditions and the following disclaimer.\n \n 2. Redistributions in binary form must reproduce the above copyright notice,\n this list of conditions and the following disclaimer in the documentation\n and/or other materials provided with the distribution.\n \n 3. Neither the name of the copyright holder nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n \n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\n AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\nLicense-File: LICENSE\nKeywords: Interactive,Interpreter,Shell,Web\nClassifier: Intended Audience :: Developers\nClassifier: Intended Audience :: Science/Research\nClassifier: Intended Audience :: System Administrators\nClassifier: License :: OSI Approved :: BSD License\nClassifier: Programming Language :: Python\nClassifier: Programming Language :: Python :: 3\nClassifier: Programming Language :: Python :: 3.7\nClassifier: Programming Language :: Python :: 3.8\nClassifier: Programming Language :: Python :: 3.9\nClassifier: Programming Language :: Python :: 3.10\nClassifier: Programming Language :: Python :: 3.11\nRequires-Python: >=3.7\nRequires-Dist: ipykernel>=6.14\nRequires-Dist: ipython\nRequires-Dist: jupyter-client>=7.0.0\nRequires-Dist: jupyter-core!=5.0.*,>=4.12\nRequires-Dist: prompt-toolkit>=3.0.30\nRequires-Dist: pygments\nRequires-Dist: pyzmq>=17\nRequires-Dist: traitlets>=5.4\nProvides-Extra: test\nRequires-Dist: flaky; extra == 'test'\nRequires-Dist: pexpect; extra == 'test'\nRequires-Dist: pytest; extra == 'test'\nDescription-Content-Type: text/markdown\n\n# Jupyter Console\n[](https://travis-ci.org/jupyter/jupyter_console)\n[](https://jupyter-console.readthedocs.io/en/latest/?badge=latest)\n\nA terminal-based console frontend for Jupyter kernels.\nThis code is based on the single-process IPython terminal.\n\nInstall with pip:\n\n pip install jupyter-console\n\nInstall with conda:\n\n conda install -c conda-forge 
jupyter_console\n\nStart:\n\n jupyter console\n\nHelp:\n\n jupyter console -h\n\nJupyter Console allows for console-based interaction with non-python \nJupyter kernels such as IJulia, IRKernel.\n\nTo start the console with a particular kernel, ask for it by name::\n\n jupyter console --kernel=julia-0.4\n\nA list of available kernels can be seen with::\n\n jupyter kernelspec list\n\n\n### Release build:\n\n```bash\n$ pip install pep517\n$ python -m pep517.build .\n```\n\n\n## Resources\n- [Project Jupyter website](https://jupyter.org)\n- [Documentation for Jupyter Console](https://jupyter-console.readthedocs.io/en/latest/) [[PDF](https://media.readthedocs.org/pdf/jupyter-console/latest/jupyter-console.pdf)]\n- [Documentation for Project Jupyter](https://jupyter.readthedocs.io/en/latest/index.html) [[PDF](https://media.readthedocs.org/pdf/jupyter/latest/jupyter.pdf)]\n- [Issues](https://github.com/jupyter/jupyter_console/issues)\n- [Technical support - Jupyter Google Group](https://groups.google.com/forum/#!forum/jupyter)\n\n## About the Jupyter Development Team\n\nThe Jupyter Development Team is the set of all contributors to the Jupyter project.\nThis includes all of the Jupyter subprojects.\n\nThe core team that coordinates development on GitHub can be found here:\nhttps://github.com/jupyter/.\n\n## Our Copyright Policy\n\nJupyter uses a shared copyright model. Each contributor maintains copyright\nover their contributions to Jupyter. But, it is important to note that these\ncontributions are typically only changes to the repositories. Thus, the Jupyter\nsource code, in its entirety is not the copyright of any single person or\ninstitution. Instead, it is the collective copyright of the entire Jupyter\nDevelopment Team. 
If individual contributors want to maintain a record of what\nchanges/contributions they have specific copyright on, they should indicate\ntheir copyright in the commit message of the change, when they commit the\nchange to one of the Jupyter repositories.\n\nWith this in mind, the following banner should be used in any source code file\nto indicate the copyright and license terms:\n\n```\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n```\n | .venv\Lib\site-packages\jupyter_console-6.6.3.dist-info\METADATA | METADATA | Other | 5,833 | 0.95 | 0.035211 | 0.064815 | node-utils | 149 | 2024-09-04T00:32:17.881411 | BSD-3-Clause | false | 9096fb57bce2d25427384597d10584cc |
../../Scripts/jupyter-console.exe,sha256=__ankPmoXkwRfJrRyBHAtfKyr9Z-SX9vITjcsLZYhIU,108420\njupyter_console-6.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\njupyter_console-6.6.3.dist-info/METADATA,sha256=Jl9bu4rvnFeIsCg9DUYTvhdYWKtajEOAiKl-Ew04KD0,5833\njupyter_console-6.6.3.dist-info/RECORD,,\njupyter_console-6.6.3.dist-info/WHEEL,sha256=Fd6mP6ydyRguakwUJ05oBE7fh2IPxgtDN9IwHJ9OqJQ,87\njupyter_console-6.6.3.dist-info/entry_points.txt,sha256=hgEYjJm5PBhO4wref-74TijoYRlqqRqoeG7IaTaYVRY,61\njupyter_console-6.6.3.dist-info/licenses/LICENSE,sha256=XKdOTS7rkzCw0SnCX4dNNUShNBO8Yq6NNngZEA0JUHI,1588\njupyter_console/__init__.py,sha256=eOFC_-OVzzDdVFJDZX3fTG4sOM198_RvSeQ1HyofjyQ,88\njupyter_console/__main__.py,sha256=9sjVFP9naGVc5XWQ6eh_6Ppfh0bYMQTFzQHTC_0Mxns,90\njupyter_console/__pycache__/__init__.cpython-313.pyc,,\njupyter_console/__pycache__/__main__.cpython-313.pyc,,\njupyter_console/__pycache__/_version.cpython-313.pyc,,\njupyter_console/__pycache__/app.cpython-313.pyc,,\njupyter_console/__pycache__/completer.cpython-313.pyc,,\njupyter_console/__pycache__/ptshell.cpython-313.pyc,,\njupyter_console/__pycache__/utils.cpython-313.pyc,,\njupyter_console/__pycache__/zmqhistory.cpython-313.pyc,,\njupyter_console/_version.py,sha256=TyhQyMM3Ip9WStFBB2EQ4WzFT3rX0Ura_Y8hxKGDjL0,443\njupyter_console/app.py,sha256=uE3KP6VK-4KnEAfefmWbQU_LtUE72UXD4X-Kr5l38VA,5054\njupyter_console/completer.py,sha256=PY1LJ8S97F8lcAbIMPDWK59n7xxd_DdGUTbHJ2UrFQY,1399\njupyter_console/ptshell.py,sha256=fL72QQ5Pj29qJWNrUhmXDu-VwC6MsiVZ6H6aZVPlGJI,38655\njupyter_console/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\njupyter_console/tests/__pycache__/__init__.cpython-313.pyc,,\njupyter_console/tests/__pycache__/conftest.cpython-313.pyc,,\njupyter_console/tests/__pycache__/test_console.cpython-313.pyc,,\njupyter_console/tests/__pycache__/test_image_handler.cpython-313.pyc,,\njupyter_console/tests/__pycache__/writetofile.cpython-313.pyc,,\njupyter_co
nsole/tests/conftest.py,sha256=XZIIbKgi7ZeKmaFEP_zSgDNj-rshkMIK-rmc7SE63DE,126\njupyter_console/tests/test_console.py,sha256=GtG2aja-O0EB-EuRc90MbleuXcQGCYdpCbWw1mQQ7Ak,2692\njupyter_console/tests/test_image_handler.py,sha256=ZaCCnB-xoiNZ404t0O839DRu0poewTA7eeHaDmtRubI,3310\njupyter_console/tests/writetofile.py,sha256=qSNiiT8PAgNW3sqi-RKL8AQs8JbV29wRsMfyAPCPPtA,783\njupyter_console/utils.py,sha256=H1scmX8a7KACE9_loaoqt5t9YVSRVZCLyaTuB7oKH5w,616\njupyter_console/zmqhistory.py,sha256=YCK76ZAe5zmZna3JH9MWxvYUY8v4YFTvkwzKZkDTsTw,3461\n | .venv\Lib\site-packages\jupyter_console-6.6.3.dist-info\RECORD | RECORD | Other | 2,537 | 0.7 | 0 | 0 | python-kit | 857 | 2025-06-10T05:41:10.088305 | Apache-2.0 | false | 45445c64e10071ee4e191ea3ee86f106 |
Wheel-Version: 1.0\nGenerator: hatchling 1.13.0\nRoot-Is-Purelib: true\nTag: py3-none-any\n | .venv\Lib\site-packages\jupyter_console-6.6.3.dist-info\WHEEL | WHEEL | Other | 87 | 0.5 | 0 | 0 | awesome-app | 445 | 2025-06-20T20:00:00.664103 | MIT | false | fe8a0d41a93fac4985beed707a0d84e4 |
BSD 3-Clause License\n\n- Copyright (c) 2001-2015, IPython Development Team\n- Copyright (c) 2015-, Jupyter Development Team\n\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n1. Redistributions of source code must retain the above copyright notice, this\n list of conditions and the following disclaimer.\n\n2. Redistributions in binary form must reproduce the above copyright notice,\n this list of conditions and the following disclaimer in the documentation\n and/or other materials provided with the distribution.\n\n3. Neither the name of the copyright holder nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n | .venv\Lib\site-packages\jupyter_console-6.6.3.dist-info\licenses\LICENSE | LICENSE | Other | 1,588 | 0.7 | 0 | 0 | react-lib | 362 | 2025-04-28T04:16:52.589190 | Apache-2.0 | false | 083556a9912a35360dae8281fb57e886 |
"""\nA base Application class for Jupyter applications.\n\nAll Jupyter applications should inherit from this.\n"""\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nfrom __future__ import annotations\n\nimport logging\nimport os\nimport sys\nimport typing as t\nfrom copy import deepcopy\nfrom pathlib import Path\nfrom shutil import which\n\nfrom traitlets import Bool, List, Unicode, observe\nfrom traitlets.config.application import Application, catch_config_error\nfrom traitlets.config.loader import ConfigFileNotFound\n\nfrom .paths import (\n allow_insecure_writes,\n issue_insecure_write_warning,\n jupyter_config_dir,\n jupyter_config_path,\n jupyter_data_dir,\n jupyter_path,\n jupyter_runtime_dir,\n)\nfrom .utils import ensure_dir_exists, ensure_event_loop\n\n# mypy: disable-error-code="no-untyped-call"\n\n# aliases and flags\n\nbase_aliases: dict[str, t.Any] = {}\nif isinstance(Application.aliases, dict):\n # traitlets 5\n base_aliases.update(Application.aliases)\n_jupyter_aliases = {\n "log-level": "Application.log_level",\n "config": "JupyterApp.config_file",\n}\nbase_aliases.update(_jupyter_aliases)\n\nbase_flags: dict[str, t.Any] = {}\nif isinstance(Application.flags, dict):\n # traitlets 5\n base_flags.update(Application.flags)\n_jupyter_flags: dict[str, t.Any] = {\n "debug": (\n {"Application": {"log_level": logging.DEBUG}},\n "set log level to logging.DEBUG (maximize logging output)",\n ),\n "generate-config": ({"JupyterApp": {"generate_config": True}}, "generate default config file"),\n "y": (\n {"JupyterApp": {"answer_yes": True}},\n "Answer yes to any questions instead of prompting.",\n ),\n}\nbase_flags.update(_jupyter_flags)\n\n\nclass NoStart(Exception):\n """Exception to raise when an application shouldn't start"""\n\n\nclass JupyterApp(Application):\n """Base class for Jupyter applications"""\n\n name = "jupyter" # override in subclasses\n description = "A Jupyter Application"\n\n aliases = 
base_aliases\n flags = base_flags\n\n def _log_level_default(self) -> int:\n return logging.INFO\n\n jupyter_path = List(Unicode())\n\n def _jupyter_path_default(self) -> list[str]:\n return jupyter_path()\n\n config_dir = Unicode()\n\n def _config_dir_default(self) -> str:\n return jupyter_config_dir()\n\n @property\n def config_file_paths(self) -> list[str]:\n path = jupyter_config_path()\n if self.config_dir not in path:\n # Insert config dir as first item.\n path.insert(0, self.config_dir)\n return path\n\n data_dir = Unicode()\n\n def _data_dir_default(self) -> str:\n d = jupyter_data_dir()\n ensure_dir_exists(d, mode=0o700)\n return d\n\n runtime_dir = Unicode()\n\n def _runtime_dir_default(self) -> str:\n rd = jupyter_runtime_dir()\n ensure_dir_exists(rd, mode=0o700)\n return rd\n\n @observe("runtime_dir")\n def _runtime_dir_changed(self, change: t.Any) -> None:\n ensure_dir_exists(change["new"], mode=0o700)\n\n generate_config = Bool(False, config=True, help="""Generate default config file.""")\n\n config_file_name = Unicode(config=True, help="Specify a config file to load.")\n\n def _config_file_name_default(self) -> str:\n if not self.name:\n return ""\n return self.name.replace("-", "_") + "_config"\n\n config_file = Unicode(\n config=True,\n help="""Full path of a config file.""",\n )\n\n answer_yes = Bool(False, config=True, help="""Answer yes to any prompts.""")\n\n def write_default_config(self) -> None:\n """Write our default config to a .py config file"""\n config_file: str\n if self.config_file:\n config_file = self.config_file\n else:\n config_file = str(Path(self.config_dir, self.config_file_name + ".py"))\n\n if Path(config_file).exists() and not self.answer_yes:\n answer = ""\n\n def ask() -> str:\n prompt = f"Overwrite {config_file!r} with default config? 
[y/N]"\n try:\n return input(prompt).lower() or "n"\n except KeyboardInterrupt:\n print("") # empty line\n return "n"\n\n answer = ask()\n while not answer.startswith(("y", "n")):\n print("Please answer 'yes' or 'no'")\n answer = ask()\n if answer.startswith("n"):\n return\n\n config_text = self.generate_config_file()\n print("Writing default config to: {config_file!r}")\n ensure_dir_exists(Path(config_file).parent.resolve(), 0o700)\n with Path.open(Path(config_file), mode="w", encoding="utf-8") as f:\n f.write(config_text)\n\n def migrate_config(self) -> None:\n """Migrate config/data from IPython 3"""\n try: # let's see if we can open the marker file\n # for reading and updating (writing)\n f_marker = Path.open(Path(self.config_dir, "migrated"), "r+")\n except FileNotFoundError: # cannot find the marker file\n pass # that means we have not migrated yet, so continue\n except OSError: # not readable and/or writable\n return # so let's give up migration in such an environment\n else: # if we got here without raising anything,\n # that means the file exists\n f_marker.close()\n return # so we must have already migrated -> bail out\n\n from .migrate import get_ipython_dir, migrate\n\n # No IPython dir, nothing to migrate\n if not Path(get_ipython_dir()).exists():\n return\n\n migrate()\n\n def load_config_file(self, suppress_errors: bool = True) -> None: # type:ignore[override]\n """Load the config file.\n\n By default, errors in loading config are handled, and a warning\n printed on screen. 
For testing, the suppress_errors option is set\n to False, so errors will make tests fail.\n """\n self.log.debug("Searching %s for config files", self.config_file_paths)\n base_config = "jupyter_config"\n try:\n super().load_config_file(\n base_config,\n path=self.config_file_paths,\n )\n except ConfigFileNotFound:\n # ignore errors loading parent\n self.log.debug("Config file %s not found", base_config)\n\n if self.config_file:\n path, config_file_name = os.path.split(self.config_file)\n else:\n path = self.config_file_paths # type:ignore[assignment]\n config_file_name = self.config_file_name\n\n if not config_file_name or (config_file_name == base_config):\n return\n\n try:\n super().load_config_file(config_file_name, path=path)\n except ConfigFileNotFound:\n self.log.debug("Config file not found, skipping: %s", config_file_name)\n except Exception:\n # Reraise errors for testing purposes, or if set in\n # self.raise_config_file_errors\n if (not suppress_errors) or self.raise_config_file_errors:\n raise\n self.log.warning("Error loading config file: %s", config_file_name, exc_info=True)\n\n # subcommand-related\n def _find_subcommand(self, name: str) -> str:\n name = f"{self.name}-{name}"\n return which(name) or ""\n\n @property\n def _dispatching(self) -> bool:\n """Return whether we are dispatching to another command\n\n or running ourselves.\n """\n return bool(self.generate_config or self.subapp or self.subcommand)\n\n subcommand = Unicode()\n\n @catch_config_error\n def initialize(self, argv: t.Any = None) -> None:\n """Initialize the application."""\n # don't hook up crash handler before parsing command-line\n if argv is None:\n argv = sys.argv[1:]\n if argv:\n subc = self._find_subcommand(argv[0])\n if subc:\n self.argv = argv\n self.subcommand = subc\n return\n self.parse_command_line(argv)\n cl_config = deepcopy(self.config)\n if self._dispatching:\n return\n self.migrate_config()\n self.load_config_file()\n # enforce cl-opts override configfile opts:\n 
self.update_config(cl_config)\n if allow_insecure_writes:\n issue_insecure_write_warning()\n\n def start(self) -> None:\n """Start the whole thing"""\n if self.subcommand:\n os.execv(self.subcommand, [self.subcommand] + self.argv[1:]) # noqa: S606\n raise NoStart()\n\n if self.subapp:\n self.subapp.start()\n raise NoStart()\n\n if self.generate_config:\n self.write_default_config()\n raise NoStart()\n\n @classmethod\n def launch_instance(cls, argv: t.Any = None, **kwargs: t.Any) -> None:\n """Launch an instance of a Jupyter Application"""\n # Ensure an event loop is set before any other code runs.\n loop = ensure_event_loop()\n try:\n super().launch_instance(argv=argv, **kwargs)\n except NoStart:\n return\n loop.close()\n\n\nclass JupyterAsyncApp(JupyterApp):\n """A Jupyter application that runs on an asyncio loop."""\n\n name = "jupyter_async" # override in subclasses\n description = "An Async Jupyter Application"\n\n # Set to True for tornado-based apps.\n _prefer_selector_loop = False\n\n async def initialize_async(self, argv: t.Any = None) -> None:\n """Initialize the application asynchronoously."""\n\n async def start_async(self) -> None:\n """Run the application in an event loop."""\n\n @classmethod\n async def _launch_instance(cls, argv: t.Any = None, **kwargs: t.Any) -> None:\n app = cls.instance(**kwargs)\n app.initialize(argv)\n await app.initialize_async(argv)\n await app.start_async()\n\n @classmethod\n def launch_instance(cls, argv: t.Any = None, **kwargs: t.Any) -> None:\n """Launch an instance of an async Jupyter Application"""\n loop = ensure_event_loop(cls._prefer_selector_loop)\n coro = cls._launch_instance(argv, **kwargs)\n loop.run_until_complete(coro)\n loop.close()\n\n\nif __name__ == "__main__":\n JupyterApp.launch_instance()\n | .venv\Lib\site-packages\jupyter_core\application.py | application.py | Python | 10,398 | 0.95 | 0.189441 | 0.069767 | vue-tools | 999 | 2025-06-17T08:00:19.967520 | GPL-3.0 | false | d83c83161366bf15f73481f1bfd711ee |
# PYTHON_ARGCOMPLETE_OK\n"""The root `jupyter` command.\n\nThis does nothing other than dispatch to subcommands or output path info.\n"""\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nfrom __future__ import annotations\n\nimport argparse\nimport errno\nimport json\nimport os\nimport site\nimport sys\nimport sysconfig\nfrom pathlib import Path\nfrom shutil import which\nfrom subprocess import Popen\nfrom typing import Any\n\nfrom . import paths\nfrom .version import __version__\n\n\nclass JupyterParser(argparse.ArgumentParser):\n """A Jupyter argument parser."""\n\n @property\n def epilog(self) -> str:\n """Add subcommands to epilog on request\n\n Avoids searching PATH for subcommands unless help output is requested.\n """\n subcommands: str = " ".join(list_subcommands())\n return f"Available subcommands: {subcommands}"\n\n @epilog.setter\n def epilog(self, x: Any) -> None:\n """Ignore epilog set in Parser.__init__"""\n\n def argcomplete(self) -> None:\n """Trigger auto-completion, if enabled"""\n try:\n import argcomplete\n\n argcomplete.autocomplete(self)\n except ImportError:\n pass\n\n\ndef jupyter_parser() -> JupyterParser:\n """Create a jupyter parser object."""\n parser = JupyterParser(\n description="Jupyter: Interactive Computing",\n )\n group = parser.add_mutually_exclusive_group(required=False)\n # don't use argparse's version action because it prints to stderr on py2\n group.add_argument(\n "--version", action="store_true", help="show the versions of core jupyter packages and exit"\n )\n subcommand_action = group.add_argument(\n "subcommand", type=str, nargs="?", help="the subcommand to launch"\n )\n # For argcomplete, supply all known subcommands\n subcommand_action.completer = lambda *args, **kwargs: list_subcommands() # type: ignore[attr-defined] # noqa: ARG005\n\n group.add_argument("--config-dir", action="store_true", help="show Jupyter config dir")\n group.add_argument("--data-dir", 
action="store_true", help="show Jupyter data dir")\n group.add_argument("--runtime-dir", action="store_true", help="show Jupyter runtime dir")\n group.add_argument(\n "--paths",\n action="store_true",\n help="show all Jupyter paths. Add --json for machine-readable format.",\n )\n parser.add_argument("--json", action="store_true", help="output paths as machine-readable json")\n parser.add_argument("--debug", action="store_true", help="output debug information about paths")\n\n return parser\n\n\ndef list_subcommands() -> list[str]:\n """List all jupyter subcommands\n\n searches PATH for `jupyter-name`\n\n Returns a list of jupyter's subcommand names, without the `jupyter-` prefix.\n Nested children (e.g. jupyter-sub-subsub) are not included.\n """\n subcommand_tuples = set()\n # construct a set of `('foo', 'bar') from `jupyter-foo-bar`\n for d in _path_with_self():\n try:\n bin_paths = list(Path(d).iterdir())\n except OSError:\n continue\n for path in bin_paths:\n name = path.name\n if name.startswith("jupyter-"):\n if sys.platform.startswith("win"):\n # remove file-extension on Windows\n name = path.stem\n subcommand_tuples.add(tuple(name.split("-")[1:]))\n # build a set of subcommand strings, excluding subcommands whose parents are defined\n subcommands = set()\n # Only include `jupyter-foo-bar` if `jupyter-foo` is not already present\n for sub_tup in subcommand_tuples:\n if not any(sub_tup[:i] in subcommand_tuples for i in range(1, len(sub_tup))):\n subcommands.add("-".join(sub_tup))\n return sorted(subcommands)\n\n\ndef _execvp(cmd: str, argv: list[str]) -> None:\n """execvp, except on Windows where it uses Popen\n\n Python provides execvp on Windows, but its behavior is problematic (Python bug#9148).\n """\n if sys.platform.startswith("win"):\n # PATH is ignored when shell=False,\n # so rely on shutil.which\n cmd_path = which(cmd)\n if cmd_path is None:\n msg = f"{cmd!r} not found"\n raise OSError(msg, errno.ENOENT)\n p = Popen([cmd_path] + argv[1:]) # noqa: 
S603\n # Don't raise KeyboardInterrupt in the parent process.\n # Set this after spawning, to avoid subprocess inheriting handler.\n import signal\n\n signal.signal(signal.SIGINT, signal.SIG_IGN)\n p.wait()\n sys.exit(p.returncode)\n else:\n os.execvp(cmd, argv) # noqa: S606\n\n\ndef _jupyter_abspath(subcommand: str) -> str:\n """This method get the abspath of a specified jupyter-subcommand with no\n changes on ENV.\n """\n # get env PATH with self\n search_path = os.pathsep.join(_path_with_self())\n # get the abs path for the jupyter-<subcommand>\n jupyter_subcommand = f"jupyter-{subcommand}"\n abs_path = which(jupyter_subcommand, path=search_path)\n if abs_path is None:\n msg = f"\nJupyter command `{jupyter_subcommand}` not found."\n raise Exception(msg)\n\n if not os.access(abs_path, os.X_OK):\n msg = f"\nJupyter command `{jupyter_subcommand}` is not executable."\n raise Exception(msg)\n\n return abs_path\n\n\ndef _path_with_self() -> list[str]:\n """Put `jupyter`'s dir at the front of PATH\n\n Ensures that /path/to/jupyter subcommand\n will do /path/to/jupyter-subcommand\n even if /other/jupyter-subcommand is ahead of it on PATH\n """\n path_list = (os.environ.get("PATH") or os.defpath).split(os.pathsep)\n\n # Insert the "scripts" directory for this Python installation\n # This allows the "jupyter" command to be relocated, while still\n # finding subcommands that have been installed in the default\n # location.\n # We put the scripts directory at the *end* of PATH, so that\n # if the user explicitly overrides a subcommand, that override\n # still takes effect.\n try:\n bindir = sysconfig.get_path("scripts")\n except KeyError:\n # The Python environment does not specify a "scripts" location\n pass\n else:\n path_list.append(bindir)\n\n scripts = [sys.argv[0]]\n if Path(scripts[0]).is_symlink():\n # include realpath, if `jupyter` is a symlink\n scripts.append(os.path.realpath(scripts[0]))\n\n for script in scripts:\n bindir = str(Path(script).parent)\n if 
Path(bindir).is_dir() and os.access(script, os.X_OK): # only if it's a script\n # ensure executable's dir is on PATH\n # avoids missing subcommands when jupyter is run via absolute path\n path_list.insert(0, bindir)\n return path_list\n\n\ndef _evaluate_argcomplete(parser: JupyterParser) -> list[str]:\n """If argcomplete is enabled, trigger autocomplete or return current words\n\n If the first word looks like a subcommand, return the current command\n that is attempting to be completed so that the subcommand can evaluate it;\n otherwise auto-complete using the main parser.\n """\n try:\n # traitlets >= 5.8 provides some argcomplete support,\n # use helper methods to jump to argcomplete\n from traitlets.config.argcomplete_config import (\n get_argcomplete_cwords,\n increment_argcomplete_index,\n )\n\n cwords = get_argcomplete_cwords()\n if cwords and len(cwords) > 1 and not cwords[1].startswith("-"):\n # If first completion word looks like a subcommand,\n # increment word from which to start handling arguments\n increment_argcomplete_index()\n return cwords\n # Otherwise no subcommand, directly autocomplete and exit\n parser.argcomplete()\n except ImportError:\n # traitlets >= 5.8 not available, just try to complete this without\n # worrying about subcommands\n parser.argcomplete()\n msg = "Control flow should not reach end of autocomplete()"\n raise AssertionError(msg)\n\n\ndef main() -> None:\n """The command entry point."""\n parser = jupyter_parser()\n argv = sys.argv\n subcommand = None\n if "_ARGCOMPLETE" in os.environ:\n argv = _evaluate_argcomplete(parser)\n subcommand = argv[1]\n elif len(argv) > 1 and not argv[1].startswith("-"):\n # Don't parse if a subcommand is given\n # Avoids argparse gobbling up args passed to subcommand, such as `-h`.\n subcommand = argv[1]\n else:\n args, opts = parser.parse_known_args()\n subcommand = args.subcommand\n if args.version:\n print("Selected Jupyter core packages...")\n for package in [\n "IPython",\n "ipykernel",\n 
"ipywidgets",\n "jupyter_client",\n "jupyter_core",\n "jupyter_server",\n "jupyterlab",\n "nbclient",\n "nbconvert",\n "nbformat",\n "notebook",\n "qtconsole",\n "traitlets",\n ]:\n try:\n if package == "jupyter_core": # We're already here\n version = __version__\n else:\n mod = __import__(package)\n version = mod.__version__\n except ImportError:\n version = "not installed"\n print(f"{package:<17}:", version)\n return\n if args.json and not args.paths:\n sys.exit("--json is only used with --paths")\n if args.debug and not args.paths:\n sys.exit("--debug is only used with --paths")\n if args.debug and args.json:\n sys.exit("--debug cannot be used with --json")\n if args.config_dir:\n print(paths.jupyter_config_dir())\n return\n if args.data_dir:\n print(paths.jupyter_data_dir())\n return\n if args.runtime_dir:\n print(paths.jupyter_runtime_dir())\n return\n if args.paths:\n data = {}\n data["runtime"] = [paths.jupyter_runtime_dir()]\n data["config"] = paths.jupyter_config_path()\n data["data"] = paths.jupyter_path()\n if args.json:\n print(json.dumps(data))\n else:\n if args.debug:\n env = os.environ\n\n if paths.use_platform_dirs():\n print(\n "JUPYTER_PLATFORM_DIRS is set to a true value, so we use platformdirs to find platform-specific directories"\n )\n else:\n print(\n "JUPYTER_PLATFORM_DIRS is set to a false value, or is not set, so we use hardcoded legacy paths for platform-specific directories"\n )\n\n if paths.prefer_environment_over_user():\n print(\n "JUPYTER_PREFER_ENV_PATH is set to a true value, or JUPYTER_PREFER_ENV_PATH is not set and we detected a virtual environment, making the environment-level path preferred over the user-level path for data and config"\n )\n else:\n print(\n "JUPYTER_PREFER_ENV_PATH is set to a false value, or JUPYTER_PREFER_ENV_PATH is not set and we did not detect a virtual environment, making the user-level path preferred over the environment-level path for data and config"\n )\n\n # config path list\n if 
env.get("JUPYTER_NO_CONFIG"):\n print(\n "JUPYTER_NO_CONFIG is set, making the config path list only a single temporary directory"\n )\n else:\n print(\n "JUPYTER_NO_CONFIG is not set, so we use the full path list for config"\n )\n\n if env.get("JUPYTER_CONFIG_PATH"):\n print(\n f"JUPYTER_CONFIG_PATH is set to '{env.get('JUPYTER_CONFIG_PATH')}', which is prepended to the config path list (unless JUPYTER_NO_CONFIG is set)"\n )\n else:\n print(\n "JUPYTER_CONFIG_PATH is not set, so we do not prepend anything to the config paths"\n )\n\n if env.get("JUPYTER_CONFIG_DIR"):\n print(\n f"JUPYTER_CONFIG_DIR is set to '{env.get('JUPYTER_CONFIG_DIR')}', overriding the default user-level config directory"\n )\n else:\n print(\n "JUPYTER_CONFIG_DIR is not set, so we use the default user-level config directory"\n )\n\n if site.ENABLE_USER_SITE:\n print(\n f"Python's site.ENABLE_USER_SITE is True, so we add the user site directory '{site.getuserbase()}'"\n )\n else:\n print(\n f"Python's site.ENABLE_USER_SITE is not True, so we do not add the Python site user directory '{site.getuserbase()}'"\n )\n\n # data path list\n if env.get("JUPYTER_PATH"):\n print(\n f"JUPYTER_PATH is set to '{env.get('JUPYTER_PATH')}', which is prepended to the data paths"\n )\n else:\n print(\n "JUPYTER_PATH is not set, so we do not prepend anything to the data paths"\n )\n\n if env.get("JUPYTER_DATA_DIR"):\n print(\n f"JUPYTER_DATA_DIR is set to '{env.get('JUPYTER_DATA_DIR')}', overriding the default user-level data directory"\n )\n else:\n print(\n "JUPYTER_DATA_DIR is not set, so we use the default user-level data directory"\n )\n\n # runtime directory\n if env.get("JUPYTER_RUNTIME_DIR"):\n print(\n f"JUPYTER_RUNTIME_DIR is set to '{env.get('JUPYTER_RUNTIME_DIR')}', overriding the default runtime directory"\n )\n else:\n print(\n "JUPYTER_RUNTIME_DIR is not set, so we use the default runtime directory"\n )\n\n print()\n\n for name in sorted(data):\n path = data[name]\n print(f"{name}:")\n for p in 
path:\n print(" " + p)\n return\n\n if not subcommand:\n parser.print_help(file=sys.stderr)\n sys.exit("\nPlease specify a subcommand or one of the optional arguments.")\n\n try:\n command = _jupyter_abspath(subcommand)\n except Exception as e:\n parser.print_help(file=sys.stderr)\n # special-case alias of "jupyter help" to "jupyter --help"\n if subcommand == "help":\n return\n sys.exit(str(e))\n\n try:\n _execvp(command, [command] + argv[2:])\n except OSError as e:\n sys.exit(f"Error executing Jupyter command {subcommand!r}: {e}")\n\n\nif __name__ == "__main__":\n main()\n | .venv\Lib\site-packages\jupyter_core\command.py | command.py | Python | 15,721 | 0.95 | 0.191176 | 0.110482 | awesome-app | 956 | 2023-07-21T08:21:27.626986 | Apache-2.0 | false | 5fd4fdbbf84c56e830944452e1cb9e3b |
# PYTHON_ARGCOMPLETE_OK\n"""Migrating IPython < 4.0 to Jupyter\n\nThis *copies* configuration and resources to their new locations in Jupyter\n\nMigrations:\n\n- .ipython/\n - nbextensions -> JUPYTER_DATA_DIR/nbextensions\n - kernels -> JUPYTER_DATA_DIR/kernels\n\n- .ipython/profile_default/\n - static/custom -> .jupyter/custom\n - nbconfig -> .jupyter/nbconfig\n - security/\n\n - notebook_secret, notebook_cookie_secret, nbsignatures.db -> JUPYTER_DATA_DIR\n\n - ipython_{notebook,nbconvert,qtconsole}_config.py -> .jupyter/jupyter_{name}_config.py\n\n\n"""\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\nfrom __future__ import annotations\n\nimport os\nimport re\nimport shutil\nfrom datetime import datetime, timezone\nfrom pathlib import Path\nfrom typing import Any\n\nfrom traitlets.config.loader import JSONFileConfigLoader, PyFileConfigLoader\nfrom traitlets.log import get_logger\n\nfrom .application import JupyterApp\nfrom .paths import jupyter_config_dir, jupyter_data_dir\nfrom .utils import ensure_dir_exists\n\n# mypy: disable-error-code="no-untyped-call"\n\n\nmigrations = {\n str(Path("{ipython_dir}", "nbextensions")): str(Path("{jupyter_data}", "nbextensions")),\n str(Path("{ipython_dir}", "kernels")): str(Path("{jupyter_data}", "kernels")),\n str(Path("{profile}", "nbconfig")): str(Path("{jupyter_config}", "nbconfig")),\n}\n\ncustom_src_t = str(Path("{profile}", "static", "custom"))\ncustom_dst_t = str(Path("{jupyter_config}", "custom"))\n\nfor security_file in ("notebook_secret", "notebook_cookie_secret", "nbsignatures.db"):\n src = str(Path("{profile}", "security", security_file))\n dst = str(Path("{jupyter_data}", security_file))\n migrations[src] = dst\n\nconfig_migrations = ["notebook", "nbconvert", "qtconsole"]\n\nregex = re.compile\n\nconfig_substitutions = {\n regex(r"\bIPythonQtConsoleApp\b"): "JupyterQtConsoleApp",\n regex(r"\bIPythonWidget\b"): "JupyterWidget",\n 
regex(r"\bRichIPythonWidget\b"): "RichJupyterWidget",\n regex(r"\bIPython\.html\b"): "notebook",\n regex(r"\bIPython\.nbconvert\b"): "nbconvert",\n}\n\n\ndef get_ipython_dir() -> str:\n """Return the IPython directory location.\n\n Not imported from IPython because the IPython implementation\n ensures that a writable directory exists,\n creating a temporary directory if not.\n We don't want to trigger that when checking if migration should happen.\n\n We only need to support the IPython < 4 behavior for migration,\n so importing for forward-compatibility and edge cases is not important.\n """\n return os.environ.get("IPYTHONDIR", str(Path("~/.ipython").expanduser()))\n\n\ndef migrate_dir(src: str, dst: str) -> bool:\n """Migrate a directory from src to dst"""\n log = get_logger()\n src_path = Path(src)\n dst_path = Path(dst)\n if not any(src_path.iterdir()):\n log.debug("No files in %s", src)\n return False\n if dst_path.exists():\n if any(dst_path.iterdir()):\n # already exists, non-empty\n log.debug("%s already exists", dst)\n return False\n dst_path.rmdir()\n log.info("Copying %s -> %s", src, dst)\n ensure_dir_exists(dst_path.parent)\n shutil.copytree(src, dst, symlinks=True)\n return True\n\n\ndef migrate_file(src: str | Path, dst: str | Path, substitutions: Any = None) -> bool:\n """Migrate a single file from src to dst\n\n substitutions is an optional dict of {regex: replacement} for performing replacements on the file.\n """\n log = get_logger()\n dst_path = Path(dst)\n if dst_path.exists():\n # already exists\n log.debug("%s already exists", dst)\n return False\n log.info("Copying %s -> %s", src, dst)\n ensure_dir_exists(dst_path.parent)\n shutil.copy(src, dst)\n if substitutions:\n with dst_path.open() as f:\n text = f.read()\n for pat, replacement in substitutions.items():\n text = pat.sub(replacement, text)\n with dst_path.open("w") as f:\n f.write(text)\n return True\n\n\ndef migrate_one(src: str, dst: str) -> bool:\n """Migrate one item\n\n dispatches 
to migrate_dir/_file\n """\n log = get_logger()\n if Path(src).is_file():\n return migrate_file(src, dst)\n if Path(src).is_dir():\n return migrate_dir(src, dst)\n log.debug("Nothing to migrate for %s", src)\n return False\n\n\ndef migrate_static_custom(src: str, dst: str) -> bool:\n """Migrate non-empty custom.js,css from src to dst\n\n src, dst are 'custom' directories containing custom.{js,css}\n """\n log = get_logger()\n migrated = False\n\n custom_js = Path(src, "custom.js")\n custom_css = Path(src, "custom.css")\n # check if custom_js is empty:\n custom_js_empty = True\n if Path(custom_js).is_file():\n with Path.open(custom_js, encoding="utf-8") as f:\n js = f.read().strip()\n for line in js.splitlines():\n if not (line.isspace() or line.strip().startswith(("/*", "*", "//"))):\n custom_js_empty = False\n break\n\n # check if custom_css is empty:\n custom_css_empty = True\n if Path(custom_css).is_file():\n with Path.open(custom_css, encoding="utf-8") as f:\n css = f.read().strip()\n custom_css_empty = css.startswith("/*") and css.endswith("*/")\n\n if custom_js_empty:\n log.debug("Ignoring empty %s", custom_js)\n if custom_css_empty:\n log.debug("Ignoring empty %s", custom_css)\n\n if custom_js_empty and custom_css_empty:\n # nothing to migrate\n return False\n ensure_dir_exists(dst)\n\n if not custom_js_empty or not custom_css_empty:\n ensure_dir_exists(dst)\n\n if not custom_js_empty and migrate_file(custom_js, Path(dst, "custom.js")):\n migrated = True\n if not custom_css_empty and migrate_file(custom_css, Path(dst, "custom.css")):\n migrated = True\n\n return migrated\n\n\ndef migrate_config(name: str, env: Any) -> list[Any]:\n """Migrate a config file.\n\n Includes substitutions for updated configurable names.\n """\n log = get_logger()\n src_base = str(Path(f"{env['profile']}", f"ipython_{name}_config"))\n dst_base = str(Path(f"{env['jupyter_config']}", f"jupyter_{name}_config"))\n loaders = {\n ".py": PyFileConfigLoader,\n ".json": 
JSONFileConfigLoader,\n }\n migrated = []\n for ext in (".py", ".json"):\n src = src_base + ext\n dst = dst_base + ext\n if Path(src).exists():\n cfg = loaders[ext](src).load_config()\n if cfg:\n if migrate_file(src, dst, substitutions=config_substitutions):\n migrated.append(src)\n else:\n # don't migrate empty config files\n log.debug("Not migrating empty config file: %s", src)\n return migrated\n\n\ndef migrate() -> bool:\n """Migrate IPython configuration to Jupyter"""\n env = {\n "jupyter_data": jupyter_data_dir(),\n "jupyter_config": jupyter_config_dir(),\n "ipython_dir": get_ipython_dir(),\n "profile": str(Path(get_ipython_dir(), "profile_default")),\n }\n migrated = False\n for src_t, dst_t in migrations.items():\n src = src_t.format(**env)\n dst = dst_t.format(**env)\n if Path(src).exists() and migrate_one(src, dst):\n migrated = True\n\n for name in config_migrations:\n if migrate_config(name, env):\n migrated = True\n\n custom_src = custom_src_t.format(**env)\n custom_dst = custom_dst_t.format(**env)\n\n if Path(custom_src).exists() and migrate_static_custom(custom_src, custom_dst):\n migrated = True\n\n # write a marker to avoid re-running migration checks\n ensure_dir_exists(env["jupyter_config"])\n with Path.open(Path(env["jupyter_config"], "migrated"), "w", encoding="utf-8") as f:\n f.write(datetime.now(tz=timezone.utc).isoformat())\n\n return migrated\n\n\nclass JupyterMigrate(JupyterApp):\n """A Jupyter Migration App."""\n\n name = "jupyter-migrate"\n description = """\n Migrate configuration and data from .ipython prior to 4.0 to Jupyter locations.\n\n This migrates:\n\n - config files in the default profile\n - kernels in ~/.ipython/kernels\n - notebook javascript extensions in ~/.ipython/extensions\n - custom.js/css to .jupyter/custom\n\n to their new Jupyter locations.\n\n All files are copied, not moved.\n If the destinations already exist, nothing will be done.\n """\n\n def start(self) -> None:\n """Start the application."""\n if not 
migrate():\n self.log.info("Found nothing to migrate.")\n\n\nmain = JupyterMigrate.launch_instance\n\n\nif __name__ == "__main__":\n main()\n | .venv\Lib\site-packages\jupyter_core\migrate.py | migrate.py | Python | 8,696 | 0.95 | 0.170213 | 0.050459 | python-kit | 961 | 2024-04-14T14:38:18.758769 | MIT | false | 5f9c9a5f93b538529f51c5910f666f05 |
"""Path utility functions."""\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\n# Derived from IPython.utils.path, which is\n# Copyright (c) IPython Development Team.\n# Distributed under the terms of the Modified BSD License.\nfrom __future__ import annotations\n\nimport errno\nimport os\nimport site\nimport stat\nimport sys\nimport tempfile\nimport warnings\nfrom contextlib import contextmanager\nfrom pathlib import Path\nfrom typing import Any, Iterator, Literal, Optional, overload\n\nimport platformdirs\n\nfrom .utils import deprecation\n\npjoin = os.path.join\n\n# Capitalize Jupyter in paths only on Windows and MacOS (when not in Homebrew)\nif sys.platform == "win32" or (\n sys.platform == "darwin" and not sys.prefix.startswith("/opt/homebrew")\n):\n APPNAME = "Jupyter"\nelse:\n APPNAME = "jupyter"\n\n# UF_HIDDEN is a stat flag not defined in the stat module.\n# It is used by BSD to indicate hidden files.\nUF_HIDDEN = getattr(stat, "UF_HIDDEN", 32768)\n\n\n@overload\ndef envset(name: str, default: bool = False) -> bool: ...\n\n\n@overload\ndef envset(name: str, default: Literal[None]) -> Optional[bool]: ...\n\n\ndef envset(name: str, default: Optional[bool] = False) -> Optional[bool]:\n """Return the boolean value of a given environment variable.\n\n An environment variable is considered set if it is assigned to a value\n other than 'no', 'n', 'false', 'off', '0', or '0.0' (case insensitive)\n\n If the environment variable is not defined, the default value is returned.\n """\n if name not in os.environ:\n return default\n\n return os.environ[name].lower() not in ["no", "n", "false", "off", "0", "0.0"]\n\n\ndef use_platform_dirs() -> bool:\n """Determine if platformdirs should be used for system-specific paths.\n\n We plan for this to default to False in jupyter_core version 5 and to True\n in jupyter_core version 6.\n """\n return envset("JUPYTER_PLATFORM_DIRS", False)\n\n\ndef get_home_dir() -> str:\n 
"""Get the real path of the home directory"""\n homedir = Path("~").expanduser()\n # Next line will make things work even when /home/ is a symlink to\n # /usr/home as it is on FreeBSD, for example\n return str(Path(homedir).resolve())\n\n\n_dtemps: dict[str, str] = {}\n\n\ndef _do_i_own(path: str) -> bool:\n """Return whether the current user owns the given path"""\n p = Path(path).resolve()\n\n # walk up to first existing parent\n while not p.exists() and p != p.parent:\n p = p.parent\n\n # simplest check: owner by name\n # not always implemented or available\n try:\n return p.owner() == os.getlogin()\n except Exception: # noqa: S110\n pass\n\n if hasattr(os, "geteuid"):\n try:\n st = p.stat()\n return st.st_uid == os.geteuid()\n except (NotImplementedError, OSError):\n # geteuid not always implemented\n pass\n\n # no ownership checks worked, check write access\n return os.access(p, os.W_OK)\n\n\ndef prefer_environment_over_user() -> bool:\n """Determine if environment-level paths should take precedence over user-level paths."""\n # If JUPYTER_PREFER_ENV_PATH is defined, that signals user intent, so return its value\n if "JUPYTER_PREFER_ENV_PATH" in os.environ:\n return envset("JUPYTER_PREFER_ENV_PATH")\n\n # If we are in a Python virtualenv, default to True (see https://docs.python.org/3/library/venv.html#venv-def)\n if sys.prefix != sys.base_prefix and _do_i_own(sys.prefix):\n return True\n\n # If sys.prefix indicates Python comes from a conda/mamba environment that is not the root environment, default to True\n if (\n "CONDA_PREFIX" in os.environ\n and sys.prefix.startswith(os.environ["CONDA_PREFIX"])\n and os.environ.get("CONDA_DEFAULT_ENV", "base") != "base"\n and _do_i_own(sys.prefix)\n ):\n return True\n\n return False\n\n\ndef _mkdtemp_once(name: str) -> str:\n """Make or reuse a temporary directory.\n\n If this is called with the same name in the same process, it will return\n the same directory.\n """\n try:\n return _dtemps[name]\n except KeyError:\n d 
= _dtemps[name] = tempfile.mkdtemp(prefix=name + "-")\n return d\n\n\ndef jupyter_config_dir() -> str:\n """Get the Jupyter config directory for this platform and user.\n\n Returns JUPYTER_CONFIG_DIR if defined, otherwise the appropriate\n directory for the platform.\n """\n\n env = os.environ\n if env.get("JUPYTER_NO_CONFIG"):\n return _mkdtemp_once("jupyter-clean-cfg")\n\n if env.get("JUPYTER_CONFIG_DIR"):\n return env["JUPYTER_CONFIG_DIR"]\n\n if use_platform_dirs():\n return platformdirs.user_config_dir(APPNAME, appauthor=False)\n\n home_dir = get_home_dir()\n return pjoin(home_dir, ".jupyter")\n\n\ndef jupyter_data_dir() -> str:\n """Get the config directory for Jupyter data files for this platform and user.\n\n These are non-transient, non-configuration files.\n\n Returns JUPYTER_DATA_DIR if defined, else a platform-appropriate path.\n """\n env = os.environ\n\n if env.get("JUPYTER_DATA_DIR"):\n return env["JUPYTER_DATA_DIR"]\n\n if use_platform_dirs():\n return platformdirs.user_data_dir(APPNAME, appauthor=False)\n\n home = get_home_dir()\n\n if sys.platform == "darwin":\n return str(Path(home, "Library", "Jupyter"))\n if sys.platform == "win32":\n appdata = os.environ.get("APPDATA", None)\n if appdata:\n return str(Path(appdata, "jupyter").resolve())\n return pjoin(jupyter_config_dir(), "data")\n # Linux, non-OS X Unix, AIX, etc.\n xdg = env.get("XDG_DATA_HOME", None)\n if not xdg:\n xdg = pjoin(home, ".local", "share")\n return pjoin(xdg, "jupyter")\n\n\ndef jupyter_runtime_dir() -> str:\n """Return the runtime dir for transient jupyter files.\n\n Returns JUPYTER_RUNTIME_DIR if defined.\n\n The default is now (data_dir)/runtime on all platforms;\n we no longer use XDG_RUNTIME_DIR after various problems.\n """\n env = os.environ\n\n if env.get("JUPYTER_RUNTIME_DIR"):\n return env["JUPYTER_RUNTIME_DIR"]\n\n return pjoin(jupyter_data_dir(), "runtime")\n\n\n# %PROGRAMDATA% is not safe by default, require opt-in to trust it\n_use_programdata: bool = 
envset("JUPYTER_USE_PROGRAMDATA")\n# _win_programdata is a path str if we're using it, None otherwise\n_win_programdata: str | None = None\nif os.name == "nt" and _use_programdata:\n _win_programdata = os.environ.get("PROGRAMDATA", None)\n\n\nif use_platform_dirs():\n if os.name == "nt" and not _use_programdata:\n # default PROGRAMDATA used by site_* is not safe by default on Windows\n SYSTEM_JUPYTER_PATH = [str(Path(sys.prefix, "share", "jupyter"))]\n else:\n SYSTEM_JUPYTER_PATH = platformdirs.site_data_dir(\n APPNAME, appauthor=False, multipath=True\n ).split(os.pathsep)\nelse:\n deprecation(\n "Jupyter is migrating its paths to use standard platformdirs\n"\n "given by the platformdirs library. To remove this warning and\n"\n "see the appropriate new directories, set the environment variable\n"\n "`JUPYTER_PLATFORM_DIRS=1` and then run `jupyter --paths`.\n"\n "The use of platformdirs will be the default in `jupyter_core` v6"\n )\n if os.name == "nt":\n # PROGRAMDATA is not defined by default on XP, and not safe by default\n if _win_programdata:\n SYSTEM_JUPYTER_PATH = [pjoin(_win_programdata, "jupyter")]\n else:\n SYSTEM_JUPYTER_PATH = [str(Path(sys.prefix, "share", "jupyter"))]\n else:\n SYSTEM_JUPYTER_PATH = [\n "/usr/local/share/jupyter",\n "/usr/share/jupyter",\n ]\n\nENV_JUPYTER_PATH: list[str] = [str(Path(sys.prefix, "share", "jupyter"))]\n\n\ndef jupyter_path(*subdirs: str) -> list[str]:\n """Return a list of directories to search for data files.\n\n There are four sources of paths to search:\n\n - $JUPYTER_PATH environment variable (always highest priority)\n - user directories (e.g. ~/.local/share/jupyter)\n - environment directories (e.g. {sys.prefix}/share/jupyter)\n - system-wide paths (e.g. 
/usr/local/share/jupyter)\n\n JUPYTER_PATH environment variable has highest priority, if defined,\n and is purely additive.\n\n If the JUPYTER_PREFER_ENV_PATH environment variable is set, the environment-level\n directories will have priority over user-level directories.\n You can also set JUPYTER_PREFER_ENV_PATH=0 to explicitly prefer user directories.\n If Jupyter detects that you are in a virtualenv or conda environment,\n environment paths are also preferred to user paths,\n otherwise user paths are preferred to environment paths.\n\n If the Python site.ENABLE_USER_SITE variable is True, we also add the\n appropriate Python user site subdirectory to the user-level directories.\n\n Finally, system-wide directories, such as `/usr/local/share/jupyter` are searched.\n\n If ``*subdirs`` are given, that subdirectory will be added to each element.\n\n\n .. versionchanged:: 5.8\n\n On Windows, %PROGRAMDATA% will be used as a system-wide path only if\n the JUPYTER_USE_PROGRAMDATA env is set.\n By default, there is no default system-wide path on Windows and the env path\n is used instead.\n\n Examples:\n\n >>> jupyter_path()\n ['~/.local/jupyter', '/usr/local/share/jupyter']\n >>> jupyter_path('kernels')\n ['~/.local/jupyter/kernels', '/usr/local/share/jupyter/kernels']\n """\n\n paths: list[str] = []\n\n # highest priority is explicit environment variable\n if os.environ.get("JUPYTER_PATH"):\n paths.extend(p.rstrip(os.sep) for p in os.environ["JUPYTER_PATH"].split(os.pathsep))\n\n # Next is environment or user, depending on the JUPYTER_PREFER_ENV_PATH flag\n user = [jupyter_data_dir()]\n if site.ENABLE_USER_SITE:\n # Check if site.getuserbase() exists to be compatible with virtualenv,\n # which often does not have this method.\n userbase: Optional[str]\n userbase = site.getuserbase() if hasattr(site, "getuserbase") else site.USER_BASE\n\n if userbase:\n userdir = str(Path(userbase, "share", "jupyter"))\n if userdir not in user:\n user.append(userdir)\n\n # Windows 
usually doesn't have a 'system' prefix,\n # so 'system' and 'env' are the same\n # make sure that env can still be preferred in this case\n if ENV_JUPYTER_PATH == SYSTEM_JUPYTER_PATH:\n env = ENV_JUPYTER_PATH\n else:\n env = [p for p in ENV_JUPYTER_PATH if p not in SYSTEM_JUPYTER_PATH]\n\n if prefer_environment_over_user():\n paths.extend(env)\n paths.extend(user)\n else:\n paths.extend(user)\n paths.extend(env)\n\n # finally, add system paths (can overlap with env, so avoid duplicates)\n for _path in SYSTEM_JUPYTER_PATH:\n if _path not in paths:\n paths.append(_path)\n\n # add subdir, if requested\n if subdirs:\n paths = [pjoin(p, *subdirs) for p in paths]\n return paths\n\n\nENV_CONFIG_PATH: list[str] = [str(Path(sys.prefix, "etc", "jupyter"))]\n\nif use_platform_dirs():\n if os.name == "nt" and not _use_programdata:\n # default PROGRAMDATA is not safe by default on Windows\n # use ENV to avoid an empty list, since some may assume this is non-empty\n SYSTEM_CONFIG_PATH = ENV_CONFIG_PATH[:]\n else:\n SYSTEM_CONFIG_PATH = platformdirs.site_config_dir(\n APPNAME, appauthor=False, multipath=True\n ).split(os.pathsep)\nelif os.name == "nt":\n # PROGRAMDATA is not defined by default on XP, and not safe by default\n # but make sure it's not empty\n if _win_programdata:\n SYSTEM_CONFIG_PATH = [str(Path(_win_programdata, "jupyter"))]\n else:\n SYSTEM_CONFIG_PATH = ENV_CONFIG_PATH[:]\nelse:\n SYSTEM_CONFIG_PATH = [\n "/usr/local/etc/jupyter",\n "/etc/jupyter",\n ]\n\n\ndef jupyter_config_path() -> list[str]:\n """Return the search path for Jupyter config files as a list.\n\n If the JUPYTER_PREFER_ENV_PATH environment variable is set, the\n environment-level directories will have priority over user-level\n directories.\n\n If the Python site.ENABLE_USER_SITE variable is True, we also add the\n appropriate Python user site subdirectory to the user-level directories.\n\n Finally, system-wide directories such as `/usr/local/etc/jupyter` are searched.\n\n\n .. 
versionchanged:: 5.8\n\n On Windows, %PROGRAMDATA% will be used as a system-wide path only if\n the JUPYTER_USE_PROGRAMDATA env is set.\n By default, there is no system-wide config path on Windows.\n\n Examples:\n\n >>> jupyter_config_path()\n ['~/.jupyter', '~/.local/etc/jupyter', '/usr/local/etc/jupyter', '/etc/jupyter']\n\n """\n if os.environ.get("JUPYTER_NO_CONFIG"):\n # jupyter_config_dir makes a blank config when JUPYTER_NO_CONFIG is set.\n return [jupyter_config_dir()]\n\n paths: list[str] = []\n\n # highest priority is explicit environment variable\n if os.environ.get("JUPYTER_CONFIG_PATH"):\n paths.extend(p.rstrip(os.sep) for p in os.environ["JUPYTER_CONFIG_PATH"].split(os.pathsep))\n\n # Next is environment or user, depending on the JUPYTER_PREFER_ENV_PATH flag\n user = [jupyter_config_dir()]\n if site.ENABLE_USER_SITE:\n userbase: Optional[str]\n # Check if site.getuserbase() exists to be compatible with virtualenv,\n # which often does not have this method.\n userbase = site.getuserbase() if hasattr(site, "getuserbase") else site.USER_BASE\n\n if userbase:\n userdir = str(Path(userbase, "etc", "jupyter"))\n if userdir not in user:\n user.append(userdir)\n\n # Windows usually doesn't have a 'system' prefix,\n # so 'system' and 'env' are the same\n # make sure that env can still be preferred in this case\n if ENV_CONFIG_PATH == SYSTEM_CONFIG_PATH:\n env = ENV_CONFIG_PATH\n else:\n env = [p for p in ENV_CONFIG_PATH if p not in SYSTEM_CONFIG_PATH]\n\n if prefer_environment_over_user():\n paths.extend(env)\n paths.extend(user)\n else:\n paths.extend(user)\n paths.extend(env)\n\n # Finally, system path\n if ENV_CONFIG_PATH != SYSTEM_CONFIG_PATH:\n paths.extend(SYSTEM_CONFIG_PATH)\n return paths\n\n\ndef exists(path: str) -> bool:\n """Replacement for `os.path.exists` which works for host mapped volumes\n on Windows containers\n """\n try:\n os.lstat(path)\n except OSError:\n return False\n return True\n\n\ndef is_file_hidden_win(abs_path: str, stat_res: 
Optional[Any] = None) -> bool:\n """Is a file hidden?\n\n This only checks the file itself; it should be called in combination with\n checking the directory containing the file.\n\n Use is_hidden() instead to check the file and its parent directories.\n\n Parameters\n ----------\n abs_path : unicode\n The absolute path to check.\n stat_res : os.stat_result, optional\n The result of calling stat() on abs_path. If not passed, this function\n will call stat() internally.\n """\n if Path(abs_path).name.startswith("."):\n return True\n\n if stat_res is None:\n try:\n stat_res = Path(abs_path).stat()\n except OSError as e:\n if e.errno == errno.ENOENT:\n return False\n raise\n\n try:\n if (\n stat_res.st_file_attributes # type:ignore[union-attr]\n & stat.FILE_ATTRIBUTE_HIDDEN # type:ignore[attr-defined]\n ):\n return True\n except AttributeError:\n # allow AttributeError on PyPy for Windows\n # 'stat_result' object has no attribute 'st_file_attributes'\n # https://foss.heptapod.net/pypy/pypy/-/issues/3469\n warnings.warn(\n "hidden files are not detectable on this system, so no file will be marked as hidden.",\n stacklevel=2,\n )\n\n return False\n\n\ndef is_file_hidden_posix(abs_path: str, stat_res: Optional[Any] = None) -> bool:\n """Is a file hidden?\n\n This only checks the file itself; it should be called in combination with\n checking the directory containing the file.\n\n Use is_hidden() instead to check the file and its parent directories.\n\n Parameters\n ----------\n abs_path : unicode\n The absolute path to check.\n stat_res : os.stat_result, optional\n The result of calling stat() on abs_path. 
If not passed, this function\n will call stat() internally.\n """\n if Path(abs_path).name.startswith("."):\n return True\n\n if stat_res is None or stat.S_ISLNK(stat_res.st_mode):\n try:\n stat_res = Path(abs_path).stat()\n except OSError as e:\n if e.errno == errno.ENOENT:\n return False\n raise\n\n # check that dirs can be listed\n if stat.S_ISDIR(stat_res.st_mode): # noqa: SIM102\n # use x-access, not actual listing, in case of slow/large listings\n if not os.access(abs_path, os.X_OK | os.R_OK):\n return True\n\n # check UF_HIDDEN\n if getattr(stat_res, "st_flags", 0) & UF_HIDDEN:\n return True\n\n return False\n\n\nif sys.platform == "win32":\n is_file_hidden = is_file_hidden_win\nelse:\n is_file_hidden = is_file_hidden_posix\n\n\ndef is_hidden(abs_path: str, abs_root: str = "") -> bool:\n """Is a file hidden or contained in a hidden directory?\n\n This will start with the rightmost path element and work backwards to the\n given root to see if a path is hidden or in a hidden directory. Hidden is\n determined by either name starting with '.' or the UF_HIDDEN flag as\n reported by stat.\n\n If abs_path is the same directory as abs_root, it will be visible even if\n that is a hidden folder. 
This only checks the visibility of files\n and directories *within* abs_root.\n\n Parameters\n ----------\n abs_path : unicode\n The absolute path to check for hidden directories.\n abs_root : unicode\n The absolute path of the root directory in which hidden directories\n should be checked for.\n """\n abs_path = os.path.normpath(abs_path)\n abs_root = os.path.normpath(abs_root)\n\n if abs_path == abs_root:\n return False\n\n if is_file_hidden(abs_path):\n return True\n\n if not abs_root:\n abs_root = abs_path.split(os.sep, 1)[0] + os.sep\n inside_root = abs_path[len(abs_root) :]\n if any(part.startswith(".") for part in Path(inside_root).parts):\n return True\n\n # check UF_HIDDEN on any location up to root.\n # is_file_hidden() already checked the file, so start from its parent dir\n path = str(Path(abs_path).parent)\n while path and path.startswith(abs_root) and path != abs_root:\n if not Path(path).exists():\n path = str(Path(path).parent)\n continue\n try:\n # may fail on Windows junctions\n st = os.lstat(path)\n except OSError:\n return True\n if getattr(st, "st_flags", 0) & UF_HIDDEN:\n return True\n path = str(Path(path).parent)\n\n return False\n\n\ndef win32_restrict_file_to_user(fname: str) -> None:\n """Secure a windows file to read-only access for the user.\n Follows guidance from win32 library creator:\n http://timgolden.me.uk/python/win32_how_do_i/add-security-to-a-file.html\n\n This method should be executed against an already generated file which\n has no secrets written to it yet.\n\n Parameters\n ----------\n\n fname : unicode\n The path to the file to secure\n """\n try:\n import win32api\n except ImportError:\n return _win32_restrict_file_to_user_ctypes(fname)\n\n import ntsecuritycon as con\n import win32security\n\n # everyone, _domain, _type = win32security.LookupAccountName("", "Everyone")\n admins = win32security.CreateWellKnownSid(win32security.WinBuiltinAdministratorsSid)\n user, _domain, _type = win32security.LookupAccountName(\n "", 
win32api.GetUserNameEx(win32api.NameSamCompatible)\n )\n\n sd = win32security.GetFileSecurity(fname, win32security.DACL_SECURITY_INFORMATION)\n\n dacl = win32security.ACL()\n # dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_ALL_ACCESS, everyone)\n dacl.AddAccessAllowedAce(\n win32security.ACL_REVISION,\n con.FILE_GENERIC_READ | con.FILE_GENERIC_WRITE | con.DELETE,\n user,\n )\n dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_ALL_ACCESS, admins)\n\n sd.SetSecurityDescriptorDacl(1, dacl, 0)\n win32security.SetFileSecurity(fname, win32security.DACL_SECURITY_INFORMATION, sd)\n return None\n\n\ndef _win32_restrict_file_to_user_ctypes(fname: str) -> None:\n """Secure a windows file to read-only access for the user.\n\n Follows guidance from win32 library creator:\n http://timgolden.me.uk/python/win32_how_do_i/add-security-to-a-file.html\n\n This method should be executed against an already generated file which\n has no secrets written to it yet.\n\n Parameters\n ----------\n\n fname : unicode\n The path to the file to secure\n """\n import ctypes\n from ctypes import wintypes\n\n advapi32 = ctypes.WinDLL("advapi32", use_last_error=True) # type:ignore[attr-defined]\n secur32 = ctypes.WinDLL("secur32", use_last_error=True) # type:ignore[attr-defined]\n\n NameSamCompatible = 2\n WinBuiltinAdministratorsSid = 26\n DACL_SECURITY_INFORMATION = 4\n ACL_REVISION = 2\n ERROR_INSUFFICIENT_BUFFER = 122\n ERROR_MORE_DATA = 234\n\n SYNCHRONIZE = 0x100000\n DELETE = 0x00010000\n STANDARD_RIGHTS_REQUIRED = 0xF0000\n STANDARD_RIGHTS_READ = 0x20000\n STANDARD_RIGHTS_WRITE = 0x20000\n FILE_READ_DATA = 1\n FILE_READ_EA = 8\n FILE_READ_ATTRIBUTES = 128\n FILE_WRITE_DATA = 2\n FILE_APPEND_DATA = 4\n FILE_WRITE_EA = 16\n FILE_WRITE_ATTRIBUTES = 256\n FILE_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x1FF\n FILE_GENERIC_READ = (\n STANDARD_RIGHTS_READ | FILE_READ_DATA | FILE_READ_ATTRIBUTES | FILE_READ_EA | SYNCHRONIZE\n )\n FILE_GENERIC_WRITE = (\n 
STANDARD_RIGHTS_WRITE\n | FILE_WRITE_DATA\n | FILE_WRITE_ATTRIBUTES\n | FILE_WRITE_EA\n | FILE_APPEND_DATA\n | SYNCHRONIZE\n )\n\n class ACL(ctypes.Structure):\n _fields_ = [\n ("AclRevision", wintypes.BYTE),\n ("Sbz1", wintypes.BYTE),\n ("AclSize", wintypes.WORD),\n ("AceCount", wintypes.WORD),\n ("Sbz2", wintypes.WORD),\n ]\n\n PSID = ctypes.c_void_p\n PACL = ctypes.POINTER(ACL)\n PSECURITY_DESCRIPTOR = ctypes.POINTER(wintypes.BYTE)\n\n def _nonzero_success(result: int, func: Any, args: Any) -> Any: # noqa: ARG001\n if not result:\n raise ctypes.WinError(ctypes.get_last_error()) # type:ignore[attr-defined]\n return args\n\n secur32.GetUserNameExW.errcheck = _nonzero_success\n secur32.GetUserNameExW.restype = wintypes.BOOL\n secur32.GetUserNameExW.argtypes = (\n ctypes.c_int, # EXTENDED_NAME_FORMAT NameFormat\n wintypes.LPWSTR, # LPWSTR lpNameBuffer,\n wintypes.PULONG, # PULONG nSize\n )\n\n advapi32.CreateWellKnownSid.errcheck = _nonzero_success\n advapi32.CreateWellKnownSid.restype = wintypes.BOOL\n advapi32.CreateWellKnownSid.argtypes = (\n wintypes.DWORD, # WELL_KNOWN_SID_TYPE WellKnownSidType\n PSID, # PSID DomainSid\n PSID, # PSID pSid\n wintypes.PDWORD, # DWORD *cbSid\n )\n\n advapi32.LookupAccountNameW.errcheck = _nonzero_success\n advapi32.LookupAccountNameW.restype = wintypes.BOOL\n advapi32.LookupAccountNameW.argtypes = (\n wintypes.LPWSTR, # LPCWSTR lpSystemName\n wintypes.LPWSTR, # LPCWSTR lpAccountName\n PSID, # PSID Sid\n wintypes.LPDWORD, # LPDWORD cbSid\n wintypes.LPWSTR, # LPCWSTR ReferencedDomainName\n wintypes.LPDWORD, # LPDWORD cchReferencedDomainName\n wintypes.LPDWORD, # PSID_NAME_USE peUse\n )\n\n advapi32.AddAccessAllowedAce.errcheck = _nonzero_success\n advapi32.AddAccessAllowedAce.restype = wintypes.BOOL\n advapi32.AddAccessAllowedAce.argtypes = (\n PACL, # PACL pAcl\n wintypes.DWORD, # DWORD dwAceRevision\n wintypes.DWORD, # DWORD AccessMask\n PSID, # PSID pSid\n )\n\n advapi32.SetSecurityDescriptorDacl.errcheck = _nonzero_success\n 
advapi32.SetSecurityDescriptorDacl.restype = wintypes.BOOL\n advapi32.SetSecurityDescriptorDacl.argtypes = (\n PSECURITY_DESCRIPTOR, # PSECURITY_DESCRIPTOR pSecurityDescriptor\n wintypes.BOOL, # BOOL bDaclPresent\n PACL, # PACL pDacl\n wintypes.BOOL, # BOOL bDaclDefaulted\n )\n\n advapi32.GetFileSecurityW.errcheck = _nonzero_success\n advapi32.GetFileSecurityW.restype = wintypes.BOOL\n advapi32.GetFileSecurityW.argtypes = (\n wintypes.LPCWSTR, # LPCWSTR lpFileName\n wintypes.DWORD, # SECURITY_INFORMATION RequestedInformation\n PSECURITY_DESCRIPTOR, # PSECURITY_DESCRIPTOR pSecurityDescriptor\n wintypes.DWORD, # DWORD nLength\n wintypes.LPDWORD, # LPDWORD lpnLengthNeeded\n )\n\n advapi32.SetFileSecurityW.errcheck = _nonzero_success\n advapi32.SetFileSecurityW.restype = wintypes.BOOL\n advapi32.SetFileSecurityW.argtypes = (\n wintypes.LPCWSTR, # LPCWSTR lpFileName\n wintypes.DWORD, # SECURITY_INFORMATION SecurityInformation\n PSECURITY_DESCRIPTOR, # PSECURITY_DESCRIPTOR pSecurityDescriptor\n )\n\n advapi32.MakeAbsoluteSD.errcheck = _nonzero_success\n advapi32.MakeAbsoluteSD.restype = wintypes.BOOL\n advapi32.MakeAbsoluteSD.argtypes = (\n PSECURITY_DESCRIPTOR, # pSelfRelativeSecurityDescriptor\n PSECURITY_DESCRIPTOR, # pAbsoluteSecurityDescriptor\n wintypes.LPDWORD, # LPDWORD lpdwAbsoluteSecurityDescriptorSize\n PACL, # PACL pDacl\n wintypes.LPDWORD, # LPDWORD lpdwDaclSize\n PACL, # PACL pSacl\n wintypes.LPDWORD, # LPDWORD lpdwSaclSize\n PSID, # PSID pOwner\n wintypes.LPDWORD, # LPDWORD lpdwOwnerSize\n PSID, # PSID pPrimaryGroup\n wintypes.LPDWORD, # LPDWORD lpdwPrimaryGroupSize\n )\n\n advapi32.MakeSelfRelativeSD.errcheck = _nonzero_success\n advapi32.MakeSelfRelativeSD.restype = wintypes.BOOL\n advapi32.MakeSelfRelativeSD.argtypes = (\n PSECURITY_DESCRIPTOR, # pAbsoluteSecurityDescriptor\n PSECURITY_DESCRIPTOR, # pSelfRelativeSecurityDescriptor\n wintypes.LPDWORD, # LPDWORD lpdwBufferLength\n )\n\n advapi32.InitializeAcl.errcheck = _nonzero_success\n 
advapi32.InitializeAcl.restype = wintypes.BOOL\n advapi32.InitializeAcl.argtypes = (\n PACL, # PACL pAcl,\n wintypes.DWORD, # DWORD nAclLength,\n wintypes.DWORD, # DWORD dwAclRevision\n )\n\n def CreateWellKnownSid(WellKnownSidType: Any) -> Any:\n # return a SID for predefined aliases\n pSid = (ctypes.c_char * 1)()\n cbSid = wintypes.DWORD()\n try:\n advapi32.CreateWellKnownSid(WellKnownSidType, None, pSid, ctypes.byref(cbSid))\n except OSError as e:\n if e.winerror != ERROR_INSUFFICIENT_BUFFER: # type:ignore[attr-defined]\n raise\n pSid = (ctypes.c_char * cbSid.value)()\n advapi32.CreateWellKnownSid(WellKnownSidType, None, pSid, ctypes.byref(cbSid))\n return pSid[:]\n\n def GetUserNameEx(NameFormat: Any) -> Any:\n # return the user or other security principal associated with\n # the calling thread\n nSize = ctypes.pointer(ctypes.c_ulong(0))\n try:\n secur32.GetUserNameExW(NameFormat, None, nSize)\n except OSError as e:\n if e.winerror != ERROR_MORE_DATA: # type:ignore[attr-defined]\n raise\n if not nSize.contents.value:\n return None\n lpNameBuffer = ctypes.create_unicode_buffer(nSize.contents.value)\n secur32.GetUserNameExW(NameFormat, lpNameBuffer, nSize)\n return lpNameBuffer.value\n\n def LookupAccountName(lpSystemName: Any, lpAccountName: Any) -> Any:\n # return a security identifier (SID) for an account on a system\n # and the name of the domain on which the account was found\n cbSid = wintypes.DWORD(0)\n cchReferencedDomainName = wintypes.DWORD(0)\n peUse = wintypes.DWORD(0)\n try:\n advapi32.LookupAccountNameW(\n lpSystemName,\n lpAccountName,\n None,\n ctypes.byref(cbSid),\n None,\n ctypes.byref(cchReferencedDomainName),\n ctypes.byref(peUse),\n )\n except OSError as e:\n if e.winerror != ERROR_INSUFFICIENT_BUFFER: # type:ignore[attr-defined]\n raise\n Sid = ctypes.create_unicode_buffer("", cbSid.value)\n pSid = ctypes.cast(ctypes.pointer(Sid), wintypes.LPVOID)\n lpReferencedDomainName = ctypes.create_unicode_buffer("", cchReferencedDomainName.value + 
1)\n success = advapi32.LookupAccountNameW(\n lpSystemName,\n lpAccountName,\n pSid,\n ctypes.byref(cbSid),\n lpReferencedDomainName,\n ctypes.byref(cchReferencedDomainName),\n ctypes.byref(peUse),\n )\n if not success:\n raise ctypes.WinError() # type:ignore[attr-defined]\n return pSid, lpReferencedDomainName.value, peUse.value\n\n def AddAccessAllowedAce(pAcl: Any, dwAceRevision: Any, AccessMask: Any, pSid: Any) -> Any:\n # add an access-allowed access control entry (ACE)\n # to an access control list (ACL)\n advapi32.AddAccessAllowedAce(pAcl, dwAceRevision, AccessMask, pSid)\n\n def GetFileSecurity(lpFileName: Any, RequestedInformation: Any) -> Any:\n # return information about the security of a file or directory\n nLength = wintypes.DWORD(0)\n try:\n advapi32.GetFileSecurityW(\n lpFileName,\n RequestedInformation,\n None,\n 0,\n ctypes.byref(nLength),\n )\n except OSError as e:\n if e.winerror != ERROR_INSUFFICIENT_BUFFER: # type:ignore[attr-defined]\n raise\n if not nLength.value:\n return None\n pSecurityDescriptor = (wintypes.BYTE * nLength.value)()\n advapi32.GetFileSecurityW(\n lpFileName,\n RequestedInformation,\n pSecurityDescriptor,\n nLength,\n ctypes.byref(nLength),\n )\n return pSecurityDescriptor\n\n def SetFileSecurity(\n lpFileName: Any, RequestedInformation: Any, pSecurityDescriptor: Any\n ) -> Any:\n # set the security of a file or directory object\n advapi32.SetFileSecurityW(lpFileName, RequestedInformation, pSecurityDescriptor)\n\n def SetSecurityDescriptorDacl(\n pSecurityDescriptor: Any, bDaclPresent: Any, pDacl: Any, bDaclDefaulted: Any\n ) -> Any:\n # set information in a discretionary access control list (DACL)\n advapi32.SetSecurityDescriptorDacl(pSecurityDescriptor, bDaclPresent, pDacl, bDaclDefaulted)\n\n def MakeAbsoluteSD(pSelfRelativeSecurityDescriptor: Any) -> Any:\n # return a security descriptor in absolute format\n # by using a security descriptor in self-relative format as a template\n pAbsoluteSecurityDescriptor = None\n 
lpdwAbsoluteSecurityDescriptorSize = wintypes.DWORD(0)\n pDacl = None\n lpdwDaclSize = wintypes.DWORD(0)\n pSacl = None\n lpdwSaclSize = wintypes.DWORD(0)\n pOwner = None\n lpdwOwnerSize = wintypes.DWORD(0)\n pPrimaryGroup = None\n lpdwPrimaryGroupSize = wintypes.DWORD(0)\n try:\n advapi32.MakeAbsoluteSD(\n pSelfRelativeSecurityDescriptor,\n pAbsoluteSecurityDescriptor,\n ctypes.byref(lpdwAbsoluteSecurityDescriptorSize),\n pDacl,\n ctypes.byref(lpdwDaclSize),\n pSacl,\n ctypes.byref(lpdwSaclSize),\n pOwner,\n ctypes.byref(lpdwOwnerSize),\n pPrimaryGroup,\n ctypes.byref(lpdwPrimaryGroupSize),\n )\n except OSError as e:\n if e.winerror != ERROR_INSUFFICIENT_BUFFER: # type:ignore[attr-defined]\n raise\n pAbsoluteSecurityDescriptor = (wintypes.BYTE * lpdwAbsoluteSecurityDescriptorSize.value)()\n pDaclData = (wintypes.BYTE * lpdwDaclSize.value)()\n pDacl = ctypes.cast(pDaclData, PACL).contents\n pSaclData = (wintypes.BYTE * lpdwSaclSize.value)()\n pSacl = ctypes.cast(pSaclData, PACL).contents\n pOwnerData = (wintypes.BYTE * lpdwOwnerSize.value)()\n pOwner = ctypes.cast(pOwnerData, PSID)\n pPrimaryGroupData = (wintypes.BYTE * lpdwPrimaryGroupSize.value)()\n pPrimaryGroup = ctypes.cast(pPrimaryGroupData, PSID)\n advapi32.MakeAbsoluteSD(\n pSelfRelativeSecurityDescriptor,\n pAbsoluteSecurityDescriptor,\n ctypes.byref(lpdwAbsoluteSecurityDescriptorSize),\n pDacl,\n ctypes.byref(lpdwDaclSize),\n pSacl,\n ctypes.byref(lpdwSaclSize),\n pOwner,\n lpdwOwnerSize,\n pPrimaryGroup,\n ctypes.byref(lpdwPrimaryGroupSize),\n )\n return pAbsoluteSecurityDescriptor\n\n def MakeSelfRelativeSD(pAbsoluteSecurityDescriptor: Any) -> Any:\n # return a security descriptor in self-relative format\n # by using a security descriptor in absolute format as a template\n pSelfRelativeSecurityDescriptor = None\n lpdwBufferLength = wintypes.DWORD(0)\n try:\n advapi32.MakeSelfRelativeSD(\n pAbsoluteSecurityDescriptor,\n pSelfRelativeSecurityDescriptor,\n ctypes.byref(lpdwBufferLength),\n )\n except 
OSError as e:\n if e.winerror != ERROR_INSUFFICIENT_BUFFER: # type:ignore[attr-defined]\n raise\n pSelfRelativeSecurityDescriptor = (wintypes.BYTE * lpdwBufferLength.value)()\n advapi32.MakeSelfRelativeSD(\n pAbsoluteSecurityDescriptor,\n pSelfRelativeSecurityDescriptor,\n ctypes.byref(lpdwBufferLength),\n )\n return pSelfRelativeSecurityDescriptor\n\n def NewAcl() -> Any:\n # return a new, initialized ACL (access control list) structure\n nAclLength = 32767 # TODO: calculate this: ctypes.sizeof(ACL) + ?\n acl_data = ctypes.create_string_buffer(nAclLength)\n pAcl = ctypes.cast(acl_data, PACL).contents\n advapi32.InitializeAcl(pAcl, nAclLength, ACL_REVISION)\n return pAcl\n\n SidAdmins = CreateWellKnownSid(WinBuiltinAdministratorsSid)\n SidUser = LookupAccountName("", GetUserNameEx(NameSamCompatible))[0]\n\n Acl = NewAcl()\n AddAccessAllowedAce(Acl, ACL_REVISION, FILE_ALL_ACCESS, SidAdmins)\n AddAccessAllowedAce(\n Acl,\n ACL_REVISION,\n FILE_GENERIC_READ | FILE_GENERIC_WRITE | DELETE,\n SidUser,\n )\n\n SelfRelativeSD = GetFileSecurity(fname, DACL_SECURITY_INFORMATION)\n AbsoluteSD = MakeAbsoluteSD(SelfRelativeSD)\n SetSecurityDescriptorDacl(AbsoluteSD, 1, Acl, 0)\n SelfRelativeSD = MakeSelfRelativeSD(AbsoluteSD)\n\n SetFileSecurity(fname, DACL_SECURITY_INFORMATION, SelfRelativeSD)\n\n\ndef get_file_mode(fname: str) -> int:\n """Retrieves the file mode corresponding to fname in a filesystem-tolerant manner.\n\n Parameters\n ----------\n\n fname : unicode\n The path to the file to get mode from\n\n """\n # Some filesystems (e.g., CIFS) auto-enable the execute bit on files. As a result, we\n # should tolerate the execute bit on the file's owner when validating permissions - thus\n # the missing least significant bit on the third octal digit. 
In addition, we also tolerate\n # the sticky bit being set, so the lsb from the fourth octal digit is also removed.\n return (\n stat.S_IMODE(Path(fname).stat().st_mode) & 0o6677\n ) # Use 4 octal digits since S_IMODE does the same\n\n\nallow_insecure_writes = os.getenv("JUPYTER_ALLOW_INSECURE_WRITES", "false").lower() in ("true", "1")\n\n\n@contextmanager\ndef secure_write(fname: str, binary: bool = False) -> Iterator[Any]:\n """Opens a file in the most restricted pattern available for\n writing content. This limits the file mode to `0o0600` and yields\n the resulting opened filed handle.\n\n Parameters\n ----------\n\n fname : unicode\n The path to the file to write\n\n binary: boolean\n Indicates that the file is binary\n """\n mode = "wb" if binary else "w"\n encoding = None if binary else "utf-8"\n open_flag = os.O_CREAT | os.O_WRONLY | os.O_TRUNC\n try:\n Path(fname).unlink()\n except OSError:\n # Skip any issues with the file not existing\n pass\n\n if os.name == "nt":\n if allow_insecure_writes:\n # Mounted file systems can have a number of failure modes inside this block.\n # For windows machines in insecure mode we simply skip this to avoid failures :/\n issue_insecure_write_warning()\n else:\n # Python on windows does not respect the group and public bits for chmod, so we need\n # to take additional steps to secure the contents.\n # Touch file preemptively to avoid editing permissions in open files in Windows\n fd = os.open(fname, open_flag, 0o0600)\n os.close(fd)\n open_flag = os.O_WRONLY | os.O_TRUNC\n win32_restrict_file_to_user(fname)\n\n with os.fdopen(os.open(fname, open_flag, 0o0600), mode, encoding=encoding) as f:\n if os.name != "nt":\n # Enforce that the file got the requested permissions before writing\n file_mode = get_file_mode(fname)\n if file_mode != 0o0600:\n if allow_insecure_writes:\n issue_insecure_write_warning()\n else:\n msg = (\n f"Permissions assignment failed for secure file: '{fname}'."\n f" Got '{oct(file_mode)}' instead of 
'0o0600'."\n )\n raise RuntimeError(msg)\n yield f\n\n\ndef issue_insecure_write_warning() -> None:\n """Issue an insecure write warning."""\n\n def format_warning(msg: str, *args: Any, **kwargs: Any) -> str: # noqa: ARG001\n return str(msg) + "\n"\n\n warnings.formatwarning = format_warning # type:ignore[assignment]\n warnings.warn(\n "WARNING: Insecure writes have been enabled via environment variable "\n "'JUPYTER_ALLOW_INSECURE_WRITES'! If this is not intended, remove the "\n "variable or set its value to 'False'.",\n stacklevel=2,\n )\n | .venv\Lib\site-packages\jupyter_core\paths.py | paths.py | Python | 38,804 | 0.95 | 0.161818 | 0.091314 | vue-tools | 542 | 2024-11-13T06:10:31.870896 | MIT | false | 082c0db314acad35d437f50312619694 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.