{ "cells": [ { "cell_type": "code", "source": [ "!pip install -U pip setuptools\n", "!pip install typing-extensions==4.6.1" ], "metadata": { "id": "FZWn9_2Axkjp", "colab": { "base_uri": "https://localhost:8080/", "height": 1000 }, "outputId": "54e46e9d-fd57-4c39-f739-937dd6fc295e" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Requirement already satisfied: pip in /usr/local/lib/python3.10/dist-packages (23.1.2)\n", "Collecting pip\n", " Downloading pip-24.0-py3-none-any.whl (2.1 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.1/2.1 MB\u001b[0m \u001b[31m28.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hRequirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (67.7.2)\n", "Collecting setuptools\n", " Using cached setuptools-70.0.0-py3-none-any.whl (863 kB)\n", "Installing collected packages: setuptools, pip\n", " Attempting uninstall: setuptools\n", " Found existing installation: setuptools 67.7.2\n", " Uninstalling setuptools-67.7.2:\n", " Successfully uninstalled setuptools-67.7.2\n", " Attempting uninstall: pip\n", " Found existing installation: pip 23.1.2\n", " Uninstalling pip-23.1.2:\n", " Successfully uninstalled pip-23.1.2\n", "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\n", "ipython 7.34.0 requires jedi>=0.16, which is not installed.\u001b[0m\u001b[31m\n", "\u001b[0mSuccessfully installed pip-24.0 setuptools-70.0.0\n" ] }, { "output_type": "display_data", "data": { "application/vnd.colab-display-data+json": { "pip_warning": { "packages": [ "_distutils_hack", "pkg_resources", "setuptools" ] }, "id": "90eeb7244ebb47f3bb9a9162f2024942" } }, "metadata": {} }, { "output_type": "stream", "name": "stdout", "text": [ "Collecting typing-extensions==4.6.1\n", " Downloading typing_extensions-4.6.1-py3-none-any.whl.metadata (2.8 kB)\n", "Downloading typing_extensions-4.6.1-py3-none-any.whl (31 kB)\n", "Traceback (most recent call last):\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pkg_resources/__init__.py\", line 3109, in _dep_map\n", " return self.__dep_map\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pkg_resources/__init__.py\", line 2902, in __getattr__\n", " raise AttributeError(attr)\n", "AttributeError: _DistInfoDistribution__dep_map\n", "\n", "During handling of the above exception, another exception occurred:\n", "\n", "Traceback (most recent call last):\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/cli/base_command.py\", line 180, in exc_logging_wrapper\n", " status = run_func(*args)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/cli/req_command.py\", line 245, in wrapper\n", " return func(self, options, args)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/commands/install.py\", line 444, in run\n", " conflicts = self._determine_conflicts(to_install)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/commands/install.py\", line 575, in _determine_conflicts\n", " return check_install_conflicts(to_install)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/operations/check.py\", line 106, in check_install_conflicts\n", " package_set, _ = 
create_package_set_from_installed()\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/operations/check.py\", line 45, in create_package_set_from_installed\n", " dependencies = list(dist.iter_dependencies())\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/metadata/pkg_resources.py\", line 221, in iter_dependencies\n", " return self._dist.requires(extras)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pkg_resources/__init__.py\", line 2822, in requires\n", " dm = self._dep_map\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pkg_resources/__init__.py\", line 3111, in _dep_map\n", " self.__dep_map = self._compute_dependencies()\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pkg_resources/__init__.py\", line 3121, in _compute_dependencies\n", " reqs.extend(parse_requirements(req))\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pkg_resources/__init__.py\", line 3174, in __init__\n", " super(Requirement, self).__init__(requirement_string)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/packaging/requirements.py\", line 102, in __init__\n", " req = REQUIREMENT.parseString(requirement_string)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/util.py\", line 256, in _inner\n", " return fn(self, *args, **kwargs)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 1180, in parse_string\n", " loc, tokens = self._parse(instring, 0)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4038, in parseImpl\n", " loc, exprtokens = e._parse(instring, loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, 
tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4272, in parseImpl\n", " return e._parse(\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4038, in parseImpl\n", " loc, exprtokens = e._parse(instring, loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 5214, in parseImpl\n", " loc, tokens = self_expr._parse(instring, loc, doActions, callPreParse=False)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4038, in parseImpl\n", " loc, exprtokens = e._parse(instring, loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4038, in parseImpl\n", " loc, exprtokens = e._parse(instring, loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 5485, in parseImpl\n", " return super().parseImpl(instring, loc, doActions)\n", " File 
\"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4548, in parseImpl\n", " return self.expr._parse(instring, loc, doActions, callPreParse=False)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4038, in parseImpl\n", " loc, exprtokens = e._parse(instring, loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 5080, in parseImpl\n", " return super().parseImpl(instring, loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4978, in parseImpl\n", " loc, tokens = self_expr_parse(instring, loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4038, in parseImpl\n", " loc, exprtokens = e._parse(instring, loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 5485, in parseImpl\n", " return super().parseImpl(instring, loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4548, in parseImpl\n", " return self.expr._parse(instring, loc, doActions, callPreParse=False)\n", " File 
\"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4016, in parseImpl\n", " loc, resultlist = self.exprs[0]._parse(\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4272, in parseImpl\n", " return e._parse(\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4548, in parseImpl\n", " return self.expr._parse(instring, loc, doActions, callPreParse=False)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 845, in _parseNoCache\n", " loc, tokens = self.parseImpl(instring, pre_loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 4038, in parseImpl\n", " loc, exprtokens = e._parse(instring, loc, doActions)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/core.py\", line 853, in _parseNoCache\n", " ret_tokens = ParseResults(\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_vendor/pyparsing/results.py\", line 148, in __new__\n", " def __new__(cls, toklist=None, name=None, **kwargs):\n", "KeyboardInterrupt\n", "\n", "During handling of the above exception, another exception occurred:\n", "\n", "Traceback (most recent call last):\n", " File \"/usr/local/bin/pip\", line 8, in \n", " sys.exit(main())\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/cli/main.py\", line 79, in main\n", " return 
command.main(cmd_args)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/cli/base_command.py\", line 101, in main\n", " return self._main(args)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/cli/base_command.py\", line 234, in _main\n", " return run(options, args)\n", " File \"/usr/local/lib/python3.10/dist-packages/pip/_internal/cli/base_command.py\", line 217, in exc_logging_wrapper\n", " logger.critical(\"Operation cancelled by user\")\n", " File \"/usr/lib/python3.10/logging/__init__.py\", line 1514, in critical\n", " def critical(self, msg, *args, **kwargs):\n", "KeyboardInterrupt\n", "^C\n" ] } ] }, { "cell_type": "code", "source": [ "# Import required libraries and install any necessary packages\n", "import spacy\n", "from spacy.tokens import DocBin\n", "from tqdm import tqdm\n", "import json\n", "\n", "# Check the installed version of spaCy\n", "spacy.__version__\n", "\n", "# Check GPU information\n", "!nvidia-smi" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "HwbnRYy411rk", "outputId": "6592b93b-fa35-4921-87fa-96ecafe5193d" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Mon Jun 10 20:42:21 2024 \n", "+---------------------------------------------------------------------------------------+\n", "| NVIDIA-SMI 535.104.05 Driver Version: 535.104.05 CUDA Version: 12.2 |\n", "|-----------------------------------------+----------------------+----------------------+\n", "| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |\n", "| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |\n", "| | | MIG M. 
|\n", "|=========================================+======================+======================|\n", "| 0 Tesla T4 Off | 00000000:00:04.0 Off | 0 |\n", "| N/A 60C P8 12W / 70W | 3MiB / 15360MiB | 0% Default |\n", "| | | N/A |\n", "+-----------------------------------------+----------------------+----------------------+\n", " \n", "+---------------------------------------------------------------------------------------+\n", "| Processes: |\n", "| GPU GI CI PID Type Process name GPU Memory |\n", "| ID ID Usage |\n", "|=======================================================================================|\n", "| No running processes found |\n", "+---------------------------------------------------------------------------------------+\n" ] } ] }, { "cell_type": "code", "source": [ "cv_data = json.load(open('/content/Files/annotations_new.json', 'r'))" ], "metadata": { "id": "R6vzzs0c13Xj" }, "execution_count": null, "outputs": [] }, { "cell_type": "code", "source": [ "# Display the number of items in the dataset\n", "len(cv_data)\n", "\n", "# Display the first item in the dataset\n", "cv_data[0]" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "25Tlx1JI2aVE", "outputId": "7bc60caf-f7c2-4050-9b4c-2d1d628e1072" }, "execution_count": null, "outputs": [ { "output_type": "execute_result", "data": { "text/plain": [ "[\"Good afternoon. Is everybody ready for timeout? Let's start with a round of introductions. My name is Dr. Lexi and I'm a cardiothoracic surgeon. Here we are with Mr. Johnson, MRN 654321. He is scheduled to undergo a coronary artery bypass grafting, and we expect to use a saphenous vein graft. Sites are marked and consent is verified. Beta blockers, allergies, antibiotics, risk and blood loss is minimal. and blood loss is minimal. The patient lies in the supine position. Radiology studies are on standby and there's no requirement for blood products. Fire risk is two and expected duration is four hours. 
Antibiotic redosing at four hours. The patient has a warming blanket and leg compression devices are applied. Does everyone agree? Is there anything to add?\\r\",\n", " {'entities': [[0, 15, 'GREETING'],\n", " [29, 34, 'READY'],\n", " [102, 110, 'NAME'],\n", " [121, 144, 'MEDICAL_ROLE'],\n", " [162, 173, 'NAME'],\n", " [175, 186, 'MRN'],\n", " [216, 247, 'MEDICAL_PROCEDURE'],\n", " [272, 293, 'MEDS/EQUIPMENT'],\n", " [294, 310, 'SITES'],\n", " [315, 335, 'CONSENT'],\n", " [336, 349, 'BETA-BLOCKERS'],\n", " [351, 360, 'ALLERGY'],\n", " [362, 373, 'ANTIBIOTIC'],\n", " [384, 406, 'BLOOD LOSS'],\n", " [411, 433, 'BLOOD LOSS'],\n", " [458, 474, 'POSITION'],\n", " [497, 507, 'RADIOLOGY'],\n", " [539, 554, 'MEDS/EQUIPMENT'],\n", " [555, 571, 'FIRE RISK'],\n", " [597, 608, 'DURATION'],\n", " [609, 628, 'ANTIBIOTIC'],\n", " [632, 643, 'DURATION'],\n", " [662, 677, 'MEDS/EQUIPMENT'],\n", " [682, 705, 'MEDS/EQUIPMENT'],\n", " [733, 738, 'AGREE']]}]" ] }, "metadata": {}, "execution_count": 4 } ] }, { "cell_type": "code", "source": [ "# Initialize spaCy configuration files by copying from base_config to config.cfg\n", "# !python -m spacy init fill-config /\"your_file_path\"/base_config.cfg /\"your_file_path\"/config/config.cfg\n", "!python -m spacy init fill-config /content/Files/base_config.cfg /content/Files/config.cfg" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "ykLOMo5P2btE", "outputId": "e5c9211d-6951-415f-8cb2-ca9765ba09aa" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "/usr/local/lib/python3.10/dist-packages/transformers/utils/generic.py:441: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead.\n", " _torch_pytree._register_pytree_node(\n", "/usr/local/lib/python3.10/dist-packages/transformers/utils/generic.py:309: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. 
Please use torch.utils._pytree.register_pytree_node instead.\n", "  _torch_pytree._register_pytree_node(\n", "\u001b[38;5;2m✔ Auto-filled config with all values\u001b[0m\n", "\u001b[38;5;2m✔ Saved config\u001b[0m\n", "/content/Files/config.cfg\n", "You can now add your data and train your pipeline:\n", "python -m spacy train config.cfg --paths.train ./train.spacy --paths.dev ./dev.spacy\n" ] } ] }, { "cell_type": "code", "source": [ "# Define a function to create spaCy DocBin objects from the annotated data\n", "def get_spacy_doc(file, data):\n", "    \"\"\"Build a spaCy DocBin from (text, {'entities': [[start, end, label], ...]}) pairs.\n", "\n", "    Overlapping annotations are skipped (first annotation wins); offsets that do\n", "    not align with token boundaries are logged to `file` instead of crashing.\n", "    \"\"\"\n", "    # Create a blank spaCy pipeline\n", "    nlp = spacy.blank('en')\n", "    db = DocBin()\n", "\n", "    # Iterate through the annotated examples\n", "    for text, annot in tqdm(data):\n", "        doc = nlp.make_doc(text)\n", "        annot = annot['entities']\n", "\n", "        ents = []\n", "        # Character offsets already claimed by an accepted entity.\n", "        # A set gives O(1) membership tests instead of the original\n", "        # quadratic list scan + repeated list concatenation.\n", "        entity_indices = set()\n", "\n", "        # Extract entities from the annotations\n", "        for start, end, label in annot:\n", "            # Skip annotations that overlap an entity we already accepted\n", "            if entity_indices.intersection(range(start, end)):\n", "                continue\n", "            entity_indices.update(range(start, end))\n", "\n", "            # char_span returns None (it does not raise) when the offsets do\n", "            # not align with token boundaries under alignment_mode='strict',\n", "            # so no try/except is needed around it.\n", "            span = doc.char_span(start, end, label=label, alignment_mode='strict')\n", "            if span is None:\n", "                # Log annotations that couldn't be mapped to token spans\n", "                file.write(str([start, end]) + \" \" + str(text) + \"\\n\")\n", "            else:\n", "                ents.append(span)\n", "\n", "        try:\n", "            doc.ents = ents\n", "            db.add(doc)\n", "        except ValueError as e:\n", "            # Invalid/overlapping spans: log the skipped doc instead of the\n", "            # original bare `except: pass` that silently dropped documents\n", "            file.write('Skipping doc: ' + str(e) + \"\\n\")\n", "\n", "    return db" ], "metadata": { "id": "k3SnB8Ky2grk" }, "execution_count": null, "outputs": [] }, { "cell_type": "code", "source": [ "# Split the annotated data into training and testing sets\n", "# (random_state pinned so the split is reproducible across runs)\n", "from sklearn.model_selection import train_test_split\n", "train, test = train_test_split(cv_data, test_size=0.2, random_state=42)\n", "\n", "# Display the number of items in the training and testing sets\n", "len(train), len(test)\n", "\n",
"# Use a context manager so the error log is closed even if processing fails\n", "with open('/content/Files/log_file.txt', 'w') as file:\n", "    # Create spaCy DocBin objects for training and testing data\n", "    db = get_spacy_doc(file, train)\n", "    db.to_disk('/content/Files/train_data.spacy')\n", "\n", "    db = get_spacy_doc(file, test)\n", "    db.to_disk('/content/Files/test_data.spacy')" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "tSP-FDTE3b_s", "outputId": "9bdc390e-42ab-41c6-da46-0ea5727f7926" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stderr", "text": [ "100%|██████████| 8/8 [00:00<00:00, 361.85it/s]\n", "100%|██████████| 2/2 [00:00<00:00, 336.61it/s]\n" ] } ] }, { "cell_type": "code", "source": [ "#!pip install cupy-cuda11x" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "1K_-YzEs326o", "outputId": "83ca3cf6-aea7-4671-bf3e-c132f51f6d21" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Collecting cupy-cuda11x\n", " Using cached cupy_cuda11x-13.1.0-cp310-cp310-manylinux2014_x86_64.whl.metadata (2.7 kB)\n", "Requirement already satisfied: numpy<1.29,>=1.22 in /usr/local/lib/python3.10/dist-packages (from cupy-cuda11x) (1.25.2)\n", "Requirement already satisfied: fastrlock>=0.5 in /usr/local/lib/python3.10/dist-packages (from cupy-cuda11x) (0.8.2)\n", "Using cached cupy_cuda11x-13.1.0-cp310-cp310-manylinux2014_x86_64.whl (95.4 MB)\n", "Installing collected packages: cupy-cuda11x\n", "Successfully installed cupy-cuda11x-13.1.0\n", "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. 
It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n", "\u001b[0m" ] } ] }, { "cell_type": "code", "source": [ "# Train a spaCy NER model using the provided configuration and data\n", "!python -m spacy train /content/Files/config.cfg --output /content/Files/output --paths.train /content/Files/train_data.spacy --paths.dev /content/Files/test_data.spacy --gpu-id 0" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "hwXigQ_F3sQM", "outputId": "b5bf329d-7868-4c86-c435-0172d6f52cd4" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "\u001b[38;5;2m✔ Created output directory: /content/Files/output\u001b[0m\n", "\u001b[38;5;4mℹ Saving to output directory: /content/Files/output\u001b[0m\n", "\u001b[38;5;4mℹ Using GPU: 0\u001b[0m\n", "\u001b[1m\n", "=========================== Initializing pipeline ===========================\u001b[0m\n", "/usr/local/lib/python3.10/dist-packages/transformers/utils/generic.py:441: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead.\n", " _torch_pytree._register_pytree_node(\n", "/usr/local/lib/python3.10/dist-packages/transformers/utils/generic.py:309: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead.\n", " _torch_pytree._register_pytree_node(\n", "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", " warnings.warn(\n", "tokenizer_config.json: 100% 25.0/25.0 [00:00<00:00, 168kB/s]\n", "config.json: 100% 481/481 [00:00<00:00, 3.52MB/s]\n", "vocab.json: 100% 899k/899k [00:00<00:00, 23.9MB/s]\n", "merges.txt: 100% 456k/456k [00:00<00:00, 1.07MB/s]\n", "tokenizer.json: 100% 1.36M/1.36M [00:00<00:00, 19.9MB/s]\n", "/usr/local/lib/python3.10/dist-packages/transformers/utils/generic.py:309: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead.\n", " _torch_pytree._register_pytree_node(\n", "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use `force_download=True`.\n", " warnings.warn(\n", "model.safetensors: 100% 499M/499M [00:02<00:00, 222MB/s]\n", "Some weights of RobertaModel were not initialized from the model checkpoint at roberta-base and are newly initialized: ['roberta.pooler.dense.weight', 'roberta.pooler.dense.bias']\n", "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", "\u001b[38;5;2m✔ Initialized pipeline\u001b[0m\n", "\u001b[1m\n", "============================= Training pipeline =============================\u001b[0m\n", "\u001b[38;5;4mℹ Pipeline: ['transformer', 'ner']\u001b[0m\n", "\u001b[38;5;4mℹ Initial learn rate: 0.0\u001b[0m\n", "E # LOSS TRANS... 
LOSS NER ENTS_F ENTS_P ENTS_R SCORE \n", "--- ------ ------------- -------- ------ ------ ------ ------\n", " 0 0 268.17 400.21 0.00 0.00 0.00 0.00\n", "200 200 98072.61 30239.39 66.67 77.78 58.33 0.67\n", "400 400 1635.49 362.79 68.09 69.57 66.67 0.68\n", "600 600 5.84 7.76 68.09 69.57 66.67 0.68\n", "800 800 0.00 0.00 68.09 69.57 66.67 0.68\n", "1000 1000 0.00 0.00 68.09 69.57 66.67 0.68\n", "1200 1200 0.00 0.00 68.09 69.57 66.67 0.68\n", "1400 1400 0.00 0.00 66.67 66.67 66.67 0.67\n", "1600 1600 58.23 31.12 63.83 65.22 62.50 0.64\n", "1800 1800 0.00 0.00 63.83 65.22 62.50 0.64\n", "2000 2000 0.00 0.00 63.83 65.22 62.50 0.64\n", "\u001b[38;5;2m✔ Saved pipeline to output directory\u001b[0m\n", "/content/Files/output/model-last\n" ] } ] }, { "cell_type": "code", "source": [ "# prompt: save the above trained model to my drive\n", "\n", "!cp -r /content/Files/output /content/drive/MyDrive/spacy_ner_model\n" ], "metadata": { "id": "yrZVJYp6AH5E" }, "execution_count": null, "outputs": [] }, { "cell_type": "code", "source": [], "metadata": { "id": "wgWSmz9WDPXZ" }, "execution_count": null, "outputs": [] }, { "cell_type": "markdown", "source": [ "# **Testing out the Custom NER Model on a sample TimeOut Script**" ], "metadata": { "id": "Q7s6QJoFDQLA" } }, { "cell_type": "code", "source": [ "# prompt: mount drive and access a model from drive\n", "\n", "from google.colab import drive\n", "drive.mount('/content/drive')" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "nYQ1wfq7DoQ1", "outputId": "64aa7a52-446d-488b-f2cb-88e838b7f98f" }, "execution_count": 3, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n" ] } ] }, { "cell_type": "code", "source": [ "# Import the spaCy library\n", "import spacy\n", "import spacy_transformers\n", "\n", "# Set the path to the model\n", "model_path = 
'/content/drive/MyDrive/spacy_ner_model/model-best'\n", "\n", "# Load the spaCy model\n", "nlp = spacy.load(model_path)\n", "\n", "# Check if the model loaded successfully\n", "print(nlp)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "5mHfpLT7AwU6", "outputId": "ad04d794-37f7-474c-adb1-5808067ed366" }, "execution_count": 13, "outputs": [ { "output_type": "stream", "name": "stderr", "text": [ "/usr/local/lib/python3.10/dist-packages/spacy/util.py:1740: UserWarning: [W111] Jupyter notebook detected: if using `prefer_gpu()` or `require_gpu()`, include it in the same cell right before `spacy.load()` to ensure that the model is loaded on the correct device. More information: http://spacy.io/usage/v3#jupyter-notebook-gpu\n", " warnings.warn(Warnings.W111)\n" ] }, { "output_type": "stream", "name": "stdout", "text": [ "\n" ] } ] }, { "cell_type": "code", "source": [ "text = \"Good afternoon. Is everybody ready for timeout? I am Dr. Doe and I'm a cardiothoracic surgeon. Here we are with Mr. Mark, MRN 654321. He is scheduled to undergo a coronary artery bypass grafting. Sites are marked and consent is verified. Beta blockers, allergies, antibiotics, risk and blood loss is minimal. and blood loss is minimal. The patient lies in the supine position. Radiology studies are on standby and there's no requirement for blood products. Fire risk is two and expected duration is four hours. Antibiotic redosing at four hours. The patient has a warming blanket and leg compression devices are applied. Does everyone agree? 
Is there anything to add?\"" ], "metadata": { "id": "DBIoyKl_AYBZ" }, "execution_count": 20, "outputs": [] }, { "cell_type": "code", "source": [ "#other texts for testing" ], "metadata": { "id": "UAwq29JryVLS" }, "execution_count": 19, "outputs": [] }, { "cell_type": "code", "source": [ "# Process the extracted text using the loaded spaCy NER model\n", "doc = nlp(text)\n", "\n", "# Iterate through the named entities (entities) recognized by the model\n", "for ent in doc.ents:\n", " # Print the recognized text and its corresponding label\n", " print(ent.text, \" ->>>> \", ent.label_)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "-csEHJ76AjHd", "outputId": "e7f5f67a-4183-477e-e5e7-de4082dc7adb" }, "execution_count": 21, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Good afternoon. ->>>> GREETING\n", "ready ->>>> READY\n", "Dr. Doe ->>>> NAME\n", "cardiothoracic surgeon. ->>>> MEDICAL_ROLE\n", "Mr. Mark ->>>> NAME\n", "MRN 654321. ->>>> MRN\n", "coronary artery bypass grafting ->>>> MEDICAL_PROCEDURE\n", "Sites are marked ->>>> SITES\n", "consent is verified. ->>>> CONSENT\n", "Beta blockers ->>>> BETA-BLOCKERS\n", "allergies ->>>> ALLERGY\n", "antibiotics ->>>> ANTIBIOTIC\n", "blood loss is minimal. ->>>> BLOOD LOSS\n", "blood loss is minimal. ->>>> BLOOD LOSS\n", "supine position. ->>>> POSITION\n", "on standby ->>>> RADIOLOGY\n", "blood products. ->>>> MEDS/EQUIPMENT\n", "Fire risk is two ->>>> FIRE RISK\n", "four hours. ->>>> DURATION\n", "Antibiotic redosing ->>>> ANTIBIOTIC\n", "four hours. 
->>>> DURATION\n", "warming blanket ->>>> MEDS/EQUIPMENT\n", "leg compression devices ->>>> MEDS/EQUIPMENT\n", "agree ->>>> AGREE\n" ] } ] }, { "cell_type": "code", "source": [], "metadata": { "id": "7G_klOLWA_fk" }, "execution_count": null, "outputs": [] } ], "metadata": { "colab": { "provenance": [], "gpuType": "T4" }, "kernelspec": { "display_name": "Python 3", "name": "python3" }, "accelerator": "GPU" }, "nbformat": 4, "nbformat_minor": 0 }