{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "tensor([1., 1., 1., 1., 1.], device='mps:0')\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages/torch/_tensor_str.py:103: UserWarning: The operator 'aten::bitwise_and.Tensor_out' is not currently supported on the MPS backend and will fall back to run on the CPU. This may have performance implications. (Triggered internally at /Users/runner/work/_temp/anaconda/conda-bld/pytorch_1659484612588/work/aten/src/ATen/mps/MPSFallback.mm:11.)\n", " nonzero_finite_vals = torch.masked_select(tensor_view, torch.isfinite(tensor_view) & tensor_view.ne(0))\n" ] } ], "source": [ "import torch\n", "\n", "import torch\n", "\n", "\n", "# Check that MPS is available\n", "if not torch.backends.mps.is_available():\n", " if not torch.backends.mps.is_built():\n", " print(\"MPS not available because the current PyTorch install was not \"\n", " \"built with MPS enabled.\")\n", " else:\n", " print(\"MPS not available because the current MacOS version is not 12.3+ \"\n", " \"and/or you do not have an MPS-enabled device on this machine.\")\n", "\n", "else:\n", " mps_device = torch.device(\"mps\")\n", "\n", " # Create a Tensor directly on the mps device\n", " x = torch.ones(5, device=mps_device)\n", " # Or\n", " # x = torch.ones(5, device=\"mps\")\n", " print(x)\n", "\n", " # # Any operation happens on the GPU\n", " # y = x * 2\n", "\n", " # # Move your model to mps just like any other device\n", " # model = YourFavoriteNet()\n", " # model.to(mps_device)\n", "\n", " # # Now every call runs on the GPU\n", " # pred = model(x)" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Collecting diffusers==0.2.4\n", " Downloading diffusers-0.2.4-py3-none-any.whl (112 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m113.0/113.0 kB\u001b[0m \u001b[31m1.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m\n", "\u001b[?25hRequirement already satisfied: numpy in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from diffusers==0.2.4) (1.23.2)\n", "Collecting filelock\n", " Downloading filelock-3.8.0-py3-none-any.whl (10 kB)\n", "Collecting importlib-metadata\n", " Downloading importlib_metadata-4.12.0-py3-none-any.whl (21 kB)\n", "Requirement already satisfied: torch>=1.4 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from diffusers==0.2.4) (1.12.1)\n", "Requirement already satisfied: Pillow in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from diffusers==0.2.4) (9.2.0)\n", "Collecting huggingface-hub<1.0,>=0.8.1\n", " Downloading huggingface_hub-0.9.0-py3-none-any.whl (120 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m120.5/120.5 kB\u001b[0m \u001b[31m4.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting regex!=2019.12.17\n", " Downloading regex-2022.8.17-cp310-cp310-macosx_11_0_arm64.whl (282 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m283.0/283.0 kB\u001b[0m \u001b[31m6.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m\n", "\u001b[?25hRequirement already satisfied: requests in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from diffusers==0.2.4) (2.28.1)\n", "Requirement already satisfied: typing-extensions>=3.7.4.3 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from huggingface-hub<1.0,>=0.8.1->diffusers==0.2.4) (4.3.0)\n", "Requirement already satisfied: packaging>=20.9 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from huggingface-hub<1.0,>=0.8.1->diffusers==0.2.4) (21.3)\n", "Collecting pyyaml>=5.1\n", " Downloading PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl (173 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m174.0/174.0 kB\u001b[0m \u001b[31m6.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting tqdm\n", " Downloading tqdm-4.64.0-py2.py3-none-any.whl (78 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m78.4/78.4 kB\u001b[0m \u001b[31m2.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting zipp>=0.5\n", " Downloading zipp-3.8.1-py3-none-any.whl (5.6 kB)\n", "Requirement already satisfied: certifi>=2017.4.17 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from requests->diffusers==0.2.4) (2022.6.15)\n", "Requirement already satisfied: charset-normalizer<3,>=2 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from requests->diffusers==0.2.4) (2.1.1)\n", "Requirement already satisfied: idna<4,>=2.5 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from requests->diffusers==0.2.4) (3.3)\n", "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from requests->diffusers==0.2.4) (1.26.11)\n", "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from packaging>=20.9->huggingface-hub<1.0,>=0.8.1->diffusers==0.2.4) (3.0.9)\n", "Installing collected packages: zipp, tqdm, regex, pyyaml, filelock, importlib-metadata, huggingface-hub, diffusers\n", "Successfully installed diffusers-0.2.4 filelock-3.8.0 huggingface-hub-0.9.0 importlib-metadata-4.12.0 pyyaml-6.0 regex-2022.8.17 tqdm-4.64.0 zipp-3.8.1\n", "Collecting transformers\n", " Downloading transformers-4.21.2-py3-none-any.whl (4.7 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.7/4.7 MB\u001b[0m \u001b[31m20.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", "\u001b[?25hCollecting scipy\n", " Downloading scipy-1.9.0-cp310-cp310-macosx_12_0_arm64.whl (29.9 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m29.9/29.9 MB\u001b[0m \u001b[31m34.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", "\u001b[?25hCollecting ftfy\n", " Downloading ftfy-6.1.1-py3-none-any.whl (53 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m53.1/53.1 kB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hRequirement already satisfied: huggingface-hub<1.0,>=0.1.0 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from transformers) (0.9.0)\n", "Collecting tokenizers!=0.11.3,<0.13,>=0.11.1\n", " Downloading tokenizers-0.12.1.tar.gz (220 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m220.7/220.7 kB\u001b[0m \u001b[31m8.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25h Installing build dependencies ... \u001b[?25ldone\n", "\u001b[?25h Getting requirements to build wheel ... \u001b[?25ldone\n", "\u001b[?25h Preparing metadata (pyproject.toml) ... \u001b[?25ldone\n", "\u001b[?25hRequirement already satisfied: packaging>=20.0 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from transformers) (21.3)\n", "Requirement already satisfied: numpy>=1.17 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from transformers) (1.23.2)\n", "Requirement already satisfied: tqdm>=4.27 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from transformers) (4.64.0)\n", "Requirement already satisfied: requests in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from transformers) (2.28.1)\n", "Requirement already satisfied: regex!=2019.12.17 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from transformers) (2022.8.17)\n", "Requirement already satisfied: pyyaml>=5.1 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from transformers) (6.0)\n", "Requirement already satisfied: filelock in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from transformers) (3.8.0)\n", "Requirement already satisfied: wcwidth>=0.2.5 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ftfy) (0.2.5)\n", "Requirement already satisfied: typing-extensions>=3.7.4.3 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from huggingface-hub<1.0,>=0.1.0->transformers) (4.3.0)\n", "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from packaging>=20.0->transformers) (3.0.9)\n", "Requirement already satisfied: idna<4,>=2.5 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from requests->transformers) (3.3)\n", "Requirement already satisfied: certifi>=2017.4.17 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from requests->transformers) (2022.6.15)\n", "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from requests->transformers) (1.26.11)\n", "Requirement already satisfied: charset-normalizer<3,>=2 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from requests->transformers) (2.1.1)\n", "Building wheels for collected packages: tokenizers\n", " Building wheel for tokenizers (pyproject.toml) ... \u001b[?25lerror\n", " \u001b[1;31merror\u001b[0m: \u001b[1msubprocess-exited-with-error\u001b[0m\n", " \n", " \u001b[31m×\u001b[0m \u001b[32mBuilding wheel for tokenizers \u001b[0m\u001b[1;32m(\u001b[0m\u001b[32mpyproject.toml\u001b[0m\u001b[1;32m)\u001b[0m did not run successfully.\n", " \u001b[31m│\u001b[0m exit code: \u001b[1;36m1\u001b[0m\n", " \u001b[31m╰─>\u001b[0m \u001b[31m[51 lines of output]\u001b[0m\n", " \u001b[31m \u001b[0m running bdist_wheel\n", " \u001b[31m \u001b[0m running build\n", " \u001b[31m \u001b[0m running build_py\n", " \u001b[31m \u001b[0m creating build\n", " \u001b[31m \u001b[0m creating build/lib.macosx-11.0-arm64-cpython-310\n", " \u001b[31m \u001b[0m creating build/lib.macosx-11.0-arm64-cpython-310/tokenizers\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/__init__.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers\n", " \u001b[31m \u001b[0m creating build/lib.macosx-11.0-arm64-cpython-310/tokenizers/models\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/models/__init__.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/models\n", " \u001b[31m \u001b[0m creating build/lib.macosx-11.0-arm64-cpython-310/tokenizers/decoders\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/decoders/__init__.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/decoders\n", " \u001b[31m \u001b[0m creating build/lib.macosx-11.0-arm64-cpython-310/tokenizers/normalizers\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/normalizers/__init__.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/normalizers\n", " \u001b[31m \u001b[0m creating build/lib.macosx-11.0-arm64-cpython-310/tokenizers/pre_tokenizers\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/pre_tokenizers/__init__.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/pre_tokenizers\n", " \u001b[31m \u001b[0m creating build/lib.macosx-11.0-arm64-cpython-310/tokenizers/processors\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/processors/__init__.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/processors\n", " \u001b[31m \u001b[0m creating build/lib.macosx-11.0-arm64-cpython-310/tokenizers/trainers\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/trainers/__init__.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/trainers\n", " \u001b[31m \u001b[0m creating build/lib.macosx-11.0-arm64-cpython-310/tokenizers/implementations\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/implementations/byte_level_bpe.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/implementations\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/implementations/sentencepiece_unigram.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/implementations\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/implementations/sentencepiece_bpe.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/implementations\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/implementations/base_tokenizer.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/implementations\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/implementations/__init__.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/implementations\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/implementations/char_level_bpe.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/implementations\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/implementations/bert_wordpiece.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/implementations\n", " \u001b[31m \u001b[0m creating build/lib.macosx-11.0-arm64-cpython-310/tokenizers/tools\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/tools/__init__.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/tools\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/tools/visualizer.py -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/tools\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/__init__.pyi -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/models/__init__.pyi -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/models\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/decoders/__init__.pyi -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/decoders\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/normalizers/__init__.pyi -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/normalizers\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/pre_tokenizers/__init__.pyi -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/pre_tokenizers\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/processors/__init__.pyi -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/processors\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/trainers/__init__.pyi -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/trainers\n", " \u001b[31m \u001b[0m copying py_src/tokenizers/tools/visualizer-styles.css -> build/lib.macosx-11.0-arm64-cpython-310/tokenizers/tools\n", " \u001b[31m \u001b[0m running build_ext\n", " \u001b[31m \u001b[0m running build_rust\n", " \u001b[31m \u001b[0m error: can't find Rust compiler\n", " \u001b[31m \u001b[0m \n", " \u001b[31m \u001b[0m If you are using an outdated pip version, it is possible a prebuilt wheel is available for this package but pip is not able to install from it. Installing from the wheel would avoid the need for a Rust compiler.\n", " \u001b[31m \u001b[0m \n", " \u001b[31m \u001b[0m To update pip, run:\n", " \u001b[31m \u001b[0m \n", " \u001b[31m \u001b[0m pip install --upgrade pip\n", " \u001b[31m \u001b[0m \n", " \u001b[31m \u001b[0m and then retry package installation.\n", " \u001b[31m \u001b[0m \n", " \u001b[31m \u001b[0m If you did intend to build this package from source, try installing a Rust compiler from your system package manager and ensure it is on the PATH during installation. Alternatively, rustup (available at https://rustup.rs) is the recommended way to download and update the Rust compiler toolchain.\n", " \u001b[31m \u001b[0m \u001b[31m[end of output]\u001b[0m\n", " \n", " \u001b[1;35mnote\u001b[0m: This error originates from a subprocess, and is likely not a problem with pip.\n", "\u001b[?25h\u001b[31m ERROR: Failed building wheel for tokenizers\u001b[0m\u001b[31m\n", "\u001b[0mFailed to build tokenizers\n", "\u001b[31mERROR: Could not build wheels for tokenizers, which is required to install pyproject.toml-based projects\u001b[0m\u001b[31m\n", "\u001b[0mCollecting ipywidgets<8,>=7\n", " Downloading ipywidgets-7.7.2-py2.py3-none-any.whl (123 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m123.4/123.4 kB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m\n", "\u001b[?25hRequirement already satisfied: ipython>=4.0.0 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipywidgets<8,>=7) (8.4.0)\n", "Requirement already satisfied: ipykernel>=4.5.1 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipywidgets<8,>=7) (6.15.1)\n", "Collecting widgetsnbextension~=3.6.0\n", " Downloading widgetsnbextension-3.6.1-py2.py3-none-any.whl (1.6 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m13.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", "\u001b[?25hCollecting ipython-genutils~=0.2.0\n", " Downloading ipython_genutils-0.2.0-py2.py3-none-any.whl (26 kB)\n", "Collecting jupyterlab-widgets<3,>=1.0.0\n", " Downloading jupyterlab_widgets-1.1.1-py3-none-any.whl (245 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m245.3/245.3 kB\u001b[0m \u001b[31m9.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hRequirement already satisfied: traitlets>=4.3.1 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipywidgets<8,>=7) (5.3.0)\n", "Requirement already satisfied: nest-asyncio in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipykernel>=4.5.1->ipywidgets<8,>=7) (1.5.5)\n", "Requirement already satisfied: psutil in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipykernel>=4.5.1->ipywidgets<8,>=7) (5.9.1)\n", "Requirement already satisfied: matplotlib-inline>=0.1 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipykernel>=4.5.1->ipywidgets<8,>=7) (0.1.6)\n", "Requirement already satisfied: jupyter-client>=6.1.12 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipykernel>=4.5.1->ipywidgets<8,>=7) (7.3.4)\n", "Requirement already satisfied: debugpy>=1.0 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipykernel>=4.5.1->ipywidgets<8,>=7) (1.6.3)\n", "Requirement already satisfied: pyzmq>=17 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipykernel>=4.5.1->ipywidgets<8,>=7) (23.2.1)\n", "Requirement already satisfied: appnope in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipykernel>=4.5.1->ipywidgets<8,>=7) (0.1.3)\n", "Requirement already satisfied: tornado>=6.1 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipykernel>=4.5.1->ipywidgets<8,>=7) (6.2)\n", "Requirement already satisfied: packaging in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipykernel>=4.5.1->ipywidgets<8,>=7) (21.3)\n", "Requirement already satisfied: pexpect>4.3 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipython>=4.0.0->ipywidgets<8,>=7) (4.8.0)\n", "Requirement already satisfied: pickleshare in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipython>=4.0.0->ipywidgets<8,>=7) (0.7.5)\n", "Requirement already satisfied: prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipython>=4.0.0->ipywidgets<8,>=7) (3.0.30)\n", "Requirement already satisfied: decorator in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipython>=4.0.0->ipywidgets<8,>=7) (5.1.1)\n", "Requirement already satisfied: jedi>=0.16 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipython>=4.0.0->ipywidgets<8,>=7) (0.18.1)\n", "Requirement already satisfied: setuptools>=18.5 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipython>=4.0.0->ipywidgets<8,>=7) (65.3.0)\n", "Requirement already satisfied: stack-data in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipython>=4.0.0->ipywidgets<8,>=7) (0.4.0)\n", "Requirement already satisfied: pygments>=2.4.0 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipython>=4.0.0->ipywidgets<8,>=7) (2.13.0)\n", "Requirement already satisfied: backcall in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from ipython>=4.0.0->ipywidgets<8,>=7) (0.2.0)\n", "Collecting notebook>=4.4.1\n", " Downloading notebook-6.4.12-py3-none-any.whl (9.9 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m9.9/9.9 MB\u001b[0m \u001b[31m31.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", "\u001b[?25hRequirement already satisfied: parso<0.9.0,>=0.8.0 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from jedi>=0.16->ipython>=4.0.0->ipywidgets<8,>=7) (0.8.3)\n", "Requirement already satisfied: python-dateutil>=2.8.2 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from jupyter-client>=6.1.12->ipykernel>=4.5.1->ipywidgets<8,>=7) (2.8.2)\n", "Requirement already satisfied: entrypoints in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from jupyter-client>=6.1.12->ipykernel>=4.5.1->ipywidgets<8,>=7) (0.4)\n", "Requirement already satisfied: jupyter-core>=4.9.2 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from jupyter-client>=6.1.12->ipykernel>=4.5.1->ipywidgets<8,>=7) (4.11.1)\n", "Collecting terminado>=0.8.3\n", " Downloading terminado-0.15.0-py3-none-any.whl (16 kB)\n", "Collecting nbformat\n", " Downloading nbformat-5.4.0-py3-none-any.whl (73 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m73.3/73.3 kB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting jinja2\n", " Downloading Jinja2-3.1.2-py3-none-any.whl (133 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m133.1/133.1 kB\u001b[0m \u001b[31m4.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting prometheus-client\n", " Downloading prometheus_client-0.14.1-py3-none-any.whl (59 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m59.5/59.5 kB\u001b[0m \u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting nbconvert>=5\n", " Downloading nbconvert-7.0.0-py3-none-any.whl (271 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m271.3/271.3 kB\u001b[0m \u001b[31m10.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting argon2-cffi\n", " Downloading argon2_cffi-21.3.0-py3-none-any.whl (14 kB)\n", "Collecting Send2Trash>=1.8.0\n", " Downloading Send2Trash-1.8.0-py3-none-any.whl (18 kB)\n", "Requirement already satisfied: ptyprocess>=0.5 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from pexpect>4.3->ipython>=4.0.0->ipywidgets<8,>=7) (0.7.0)\n", "Requirement already satisfied: wcwidth in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->ipython>=4.0.0->ipywidgets<8,>=7) (0.2.5)\n", "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from packaging->ipykernel>=4.5.1->ipywidgets<8,>=7) (3.0.9)\n", "Requirement already satisfied: executing in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from stack-data->ipython>=4.0.0->ipywidgets<8,>=7) (0.10.0)\n", "Requirement already satisfied: asttokens in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from stack-data->ipython>=4.0.0->ipywidgets<8,>=7) (2.0.8)\n", "Requirement already satisfied: pure-eval in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from stack-data->ipython>=4.0.0->ipywidgets<8,>=7) (0.2.2)\n", "Collecting mistune<3,>=2.0.3\n", " Downloading mistune-2.0.4-py2.py3-none-any.whl (24 kB)\n", "Collecting beautifulsoup4\n", " Downloading beautifulsoup4-4.11.1-py3-none-any.whl (128 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m128.2/128.2 kB\u001b[0m \u001b[31m5.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting tinycss2\n", " Downloading tinycss2-1.1.1-py3-none-any.whl (21 kB)\n", "Collecting defusedxml\n", " Downloading defusedxml-0.7.1-py2.py3-none-any.whl (25 kB)\n", "Collecting markupsafe>=2.0\n", " Downloading MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl (17 kB)\n", "Collecting bleach\n", " Downloading bleach-5.0.1-py3-none-any.whl (160 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m160.9/160.9 kB\u001b[0m \u001b[31m5.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting nbclient>=0.5.0\n", " Downloading nbclient-0.6.7-py3-none-any.whl (71 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m71.8/71.8 kB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting pandocfilters>=1.4.1\n", " Downloading pandocfilters-1.5.0-py2.py3-none-any.whl (8.7 kB)\n", "Collecting jupyterlab-pygments\n", " Downloading jupyterlab_pygments-0.2.2-py2.py3-none-any.whl (21 kB)\n", "Collecting lxml\n", " Downloading lxml-4.9.1.tar.gz (3.4 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.4/3.4 MB\u001b[0m \u001b[31m27.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", "\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25ldone\n", "\u001b[?25hCollecting fastjsonschema\n", " Downloading fastjsonschema-2.16.1-py3-none-any.whl (22 kB)\n", "Collecting jsonschema>=2.6\n", " Downloading jsonschema-4.14.0-py3-none-any.whl (82 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m82.4/82.4 kB\u001b[0m \u001b[31m3.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hRequirement already satisfied: six>=1.5 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from python-dateutil>=2.8.2->jupyter-client>=6.1.12->ipykernel>=4.5.1->ipywidgets<8,>=7) (1.16.0)\n", "Collecting argon2-cffi-bindings\n", " Downloading argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl (53 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m53.1/53.1 kB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting attrs>=17.4.0\n", " Downloading attrs-22.1.0-py2.py3-none-any.whl (58 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.8/58.8 kB\u001b[0m \u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hCollecting pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0\n", " Downloading pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl (81 kB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m81.4/81.4 kB\u001b[0m \u001b[31m3.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25hRequirement already satisfied: cffi>=1.0.1 in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from argon2-cffi-bindings->argon2-cffi->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets<8,>=7) (1.15.1)\n", "Collecting soupsieve>1.2\n", " Downloading soupsieve-2.3.2.post1-py3-none-any.whl (37 kB)\n", "Collecting webencodings\n", " Downloading webencodings-0.5.1-py2.py3-none-any.whl (11 kB)\n", "Requirement already satisfied: pycparser in /Users/johnnydevriese/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages (from cffi>=1.0.1->argon2-cffi-bindings->argon2-cffi->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets<8,>=7) (2.21)\n", "Building wheels for collected packages: lxml\n", " Building wheel for lxml (setup.py) ... \u001b[?25ldone\n", "\u001b[?25h Created wheel for lxml: filename=lxml-4.9.1-cp310-cp310-macosx_11_0_arm64.whl size=1423116 sha256=1e181d1aacf06a988fc9ff742b400e46f8f773abdef087dab0ac94e2435f8e60\n", " Stored in directory: /Users/johnnydevriese/Library/Caches/pip/wheels/a4/ec/7b/8acde6da24b5aabeee049213d5bec12d1e9214d3cae276387b\n", "Successfully built lxml\n", "Installing collected packages: webencodings, Send2Trash, mistune, ipython-genutils, fastjsonschema, tinycss2, terminado, soupsieve, pyrsistent, prometheus-client, pandocfilters, markupsafe, lxml, jupyterlab-widgets, jupyterlab-pygments, defusedxml, bleach, attrs, jsonschema, jinja2, beautifulsoup4, argon2-cffi-bindings, nbformat, argon2-cffi, nbclient, nbconvert, notebook, widgetsnbextension, ipywidgets\n", "Successfully installed Send2Trash-1.8.0 argon2-cffi-21.3.0 argon2-cffi-bindings-21.2.0 attrs-22.1.0 beautifulsoup4-4.11.1 bleach-5.0.1 defusedxml-0.7.1 fastjsonschema-2.16.1 ipython-genutils-0.2.0 ipywidgets-7.7.2 jinja2-3.1.2 jsonschema-4.14.0 jupyterlab-pygments-0.2.2 jupyterlab-widgets-1.1.1 lxml-4.9.1 markupsafe-2.1.1 mistune-2.0.4 nbclient-0.6.7 nbconvert-7.0.0 nbformat-5.4.0 notebook-6.4.12 pandocfilters-1.5.0 prometheus-client-0.14.1 pyrsistent-0.18.1 soupsieve-2.3.2.post1 terminado-0.15.0 tinycss2-1.1.1 webencodings-0.5.1 widgetsnbextension-3.6.1\n" ] } ], "source": [ "!pip install diffusers==0.2.4\n", "!pip install transformers scipy ftfy\n", "!pip install \"ipywidgets>=7,<8\"" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { "model_id": "8f312208bf4744df84d15d15e956afb2", "version_major": 2, "version_minor": 0 }, "text/plain": [ "VBox(children=(HTML(value='
\u001b[0;34m()\u001b[0m\n\u001b[1;32m 19\u001b[0m prompt \u001b[39m=\u001b[39m \u001b[39m\"\u001b[39m\u001b[39ma photo of an astronaut riding a horse on mars\u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 20\u001b[0m \u001b[39m# with autocast(\"mps\"):\u001b[39;00m\n\u001b[0;32m---> 21\u001b[0m image \u001b[39m=\u001b[39m pipe(prompt)[\u001b[39m\"\u001b[39m\u001b[39msample\u001b[39m\u001b[39m\"\u001b[39m][\u001b[39m0\u001b[39m] \n\u001b[1;32m 23\u001b[0m image\u001b[39m.\u001b[39msave(\u001b[39m\"\u001b[39m\u001b[39mastronaut_rides_horse.png\u001b[39m\u001b[39m\"\u001b[39m)\n", "File \u001b[0;32m~/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages/torch/autograd/grad_mode.py:27\u001b[0m, in \u001b[0;36m_DecoratorContextManager.__call__..decorate_context\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 24\u001b[0m \u001b[39m@functools\u001b[39m\u001b[39m.\u001b[39mwraps(func)\n\u001b[1;32m 25\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mdecorate_context\u001b[39m(\u001b[39m*\u001b[39margs, \u001b[39m*\u001b[39m\u001b[39m*\u001b[39mkwargs):\n\u001b[1;32m 26\u001b[0m \u001b[39mwith\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mclone():\n\u001b[0;32m---> 27\u001b[0m \u001b[39mreturn\u001b[39;00m func(\u001b[39m*\u001b[39;49margs, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n", "File \u001b[0;32m~/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py:82\u001b[0m, in \u001b[0;36mStableDiffusionPipeline.__call__\u001b[0;34m(self, prompt, height, width, num_inference_steps, guidance_scale, eta, generator, output_type, **kwargs)\u001b[0m\n\u001b[1;32m 74\u001b[0m \u001b[39m# get prompt text embeddings\u001b[39;00m\n\u001b[1;32m 75\u001b[0m text_input \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mtokenizer(\n\u001b[1;32m 76\u001b[0m prompt,\n\u001b[1;32m 77\u001b[0m padding\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mmax_length\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 80\u001b[0m return_tensors\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mpt\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[1;32m 81\u001b[0m )\n\u001b[0;32m---> 82\u001b[0m text_embeddings \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtext_encoder(text_input\u001b[39m.\u001b[39;49minput_ids\u001b[39m.\u001b[39;49mto(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mdevice))[\u001b[39m0\u001b[39m]\n\u001b[1;32m 84\u001b[0m \u001b[39m# here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\u001b[39;00m\n\u001b[1;32m 85\u001b[0m \u001b[39m# of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\u001b[39;00m\n\u001b[1;32m 86\u001b[0m \u001b[39m# corresponds to doing no classifier free guidance.\u001b[39;00m\n\u001b[1;32m 87\u001b[0m do_classifier_free_guidance \u001b[39m=\u001b[39m guidance_scale \u001b[39m>\u001b[39m \u001b[39m1.0\u001b[39m\n", "File \u001b[0;32m~/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages/torch/nn/modules/module.py:1130\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 1126\u001b[0m \u001b[39m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1127\u001b[0m \u001b[39m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1128\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m (\u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_backward_hooks \u001b[39mor\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_forward_hooks \u001b[39mor\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_forward_pre_hooks \u001b[39mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1129\u001b[0m \u001b[39mor\u001b[39;00m _global_forward_hooks \u001b[39mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1130\u001b[0m \u001b[39mreturn\u001b[39;00m forward_call(\u001b[39m*\u001b[39;49m\u001b[39minput\u001b[39;49m, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n\u001b[1;32m 1131\u001b[0m \u001b[39m# Do not call functions when jit is used\u001b[39;00m\n\u001b[1;32m 1132\u001b[0m full_backward_hooks, non_full_backward_hooks \u001b[39m=\u001b[39m [], []\n", "File \u001b[0;32m~/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages/transformers/models/clip/modeling_clip.py:721\u001b[0m, in \u001b[0;36mCLIPTextModel.forward\u001b[0;34m(self, input_ids, attention_mask, position_ids, output_attentions, output_hidden_states, return_dict)\u001b[0m\n\u001b[1;32m 693\u001b[0m \u001b[39m@add_start_docstrings_to_model_forward\u001b[39m(CLIP_TEXT_INPUTS_DOCSTRING)\n\u001b[1;32m 694\u001b[0m \u001b[39m@replace_return_docstrings\u001b[39m(output_type\u001b[39m=\u001b[39mBaseModelOutputWithPooling, config_class\u001b[39m=\u001b[39mCLIPTextConfig)\n\u001b[1;32m 695\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mforward\u001b[39m(\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 702\u001b[0m return_dict: Optional[\u001b[39mbool\u001b[39m] \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m,\n\u001b[1;32m 703\u001b[0m ) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m Union[Tuple, BaseModelOutputWithPooling]:\n\u001b[1;32m 704\u001b[0m \u001b[39mr\u001b[39m\u001b[39m\"\"\"\u001b[39;00m\n\u001b[1;32m 705\u001b[0m \u001b[39m Returns:\u001b[39;00m\n\u001b[1;32m 706\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 719\u001b[0m \u001b[39m >>> pooled_output = outputs.pooler_output # pooled (EOS token) states\u001b[39;00m\n\u001b[1;32m 720\u001b[0m \u001b[39m ```\"\"\"\u001b[39;00m\n\u001b[0;32m--> 721\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtext_model(\n\u001b[1;32m 722\u001b[0m input_ids\u001b[39m=\u001b[39;49minput_ids,\n\u001b[1;32m 723\u001b[0m attention_mask\u001b[39m=\u001b[39;49mattention_mask,\n\u001b[1;32m 724\u001b[0m position_ids\u001b[39m=\u001b[39;49mposition_ids,\n\u001b[1;32m 725\u001b[0m output_attentions\u001b[39m=\u001b[39;49moutput_attentions,\n\u001b[1;32m 726\u001b[0m output_hidden_states\u001b[39m=\u001b[39;49moutput_hidden_states,\n\u001b[1;32m 727\u001b[0m return_dict\u001b[39m=\u001b[39;49mreturn_dict,\n\u001b[1;32m 728\u001b[0m )\n", "File \u001b[0;32m~/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages/torch/nn/modules/module.py:1130\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 1126\u001b[0m \u001b[39m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1127\u001b[0m \u001b[39m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1128\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m (\u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_backward_hooks \u001b[39mor\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_forward_hooks \u001b[39mor\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_forward_pre_hooks \u001b[39mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1129\u001b[0m \u001b[39mor\u001b[39;00m _global_forward_hooks \u001b[39mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1130\u001b[0m \u001b[39mreturn\u001b[39;00m forward_call(\u001b[39m*\u001b[39;49m\u001b[39minput\u001b[39;49m, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n\u001b[1;32m 1131\u001b[0m \u001b[39m# Do not call functions when jit is used\u001b[39;00m\n\u001b[1;32m 1132\u001b[0m full_backward_hooks, non_full_backward_hooks \u001b[39m=\u001b[39m [], []\n", "File \u001b[0;32m~/miniforge3/envs/pytorch-1-12/lib/python3.10/site-packages/transformers/models/clip/modeling_clip.py:656\u001b[0m, in \u001b[0;36mCLIPTextTransformer.forward\u001b[0;34m(self, input_ids, attention_mask, position_ids, output_attentions, output_hidden_states, return_dict)\u001b[0m\n\u001b[1;32m 652\u001b[0m last_hidden_state \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mfinal_layer_norm(last_hidden_state)\n\u001b[1;32m 654\u001b[0m \u001b[39m# text_embeds.shape = [batch_size, sequence_length, transformer.width]\u001b[39;00m\n\u001b[1;32m 655\u001b[0m \u001b[39m# take features from the eot embedding (eot_token is the highest number in each sequence)\u001b[39;00m\n\u001b[0;32m--> 656\u001b[0m pooled_output \u001b[39m=\u001b[39m last_hidden_state[torch\u001b[39m.\u001b[39;49marange(last_hidden_state\u001b[39m.\u001b[39;49mshape[\u001b[39m0\u001b[39;49m]), input_ids\u001b[39m.\u001b[39;49margmax(dim\u001b[39m=\u001b[39;49m\u001b[39m-\u001b[39;49m\u001b[39m1\u001b[39;49m)]\n\u001b[1;32m 658\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m return_dict:\n\u001b[1;32m 659\u001b[0m \u001b[39mreturn\u001b[39;00m (last_hidden_state, pooled_output) \u001b[39m+\u001b[39m encoder_outputs[\u001b[39m1\u001b[39m:]\n", "\u001b[0;31mNotImplementedError\u001b[0m: The operator 'aten::index.Tensor' is not current implemented for the MPS device. If you want this op to be added in priority during the prototype phase of this feature, please comment on https://github.com/pytorch/pytorch/issues/77764. As a temporary fix, you can set the environment variable `PYTORCH_ENABLE_MPS_FALLBACK=1` to use the CPU as a fallback for this op. WARNING: this will be slower than running natively on MPS." ] } ], "source": [ "# make sure you're logged in with `huggingface-cli login`\n", "from torch import autocast\n", "from diffusers import StableDiffusionPipeline, LMSDiscreteScheduler\n", "\n", "lms = LMSDiscreteScheduler(\n", " beta_start=0.00085, \n", " beta_end=0.012, \n", " beta_schedule=\"scaled_linear\"\n", ")\n", "\n", "pipe = StableDiffusionPipeline.from_pretrained(\n", " \"CompVis/stable-diffusion-v1-3\", \n", " scheduler=lms,\n", " torch_dtype=torch.float16, \n", " revision=\"fp16\",\n", " use_auth_token=True\n", ").to(\"mps\")\n", "\n", "prompt = \"a photo of an astronaut riding a horse on mars\"\n", "# with autocast(\"mps\"):\n", "image = pipe(prompt)[\"sample\"][0] \n", " \n", "image.save(\"astronaut_rides_horse.png\")" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3.10.6 ('pytorch-1-12')", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.6" }, "orig_nbformat": 4, "vscode": { "interpreter": { "hash": "d91b751b6cafe1e473109edab0583e459c2e471c181546b21e3fef0fb0f3aa3b" } } }, "nbformat": 4, "nbformat_minor": 2 }