Upload 9 files
- Alpaca_+_Codellama_34b_full_example.ipynb +4 -11
- Alpaca_+_Gemma_7b_full_example.ipynb +4 -11
- Alpaca_+_Llama_7b_full_example.ipynb +4 -11
- Alpaca_+_Mistral_7b_full_example.ipynb +4 -11
- Alpaca_+_TinyLlama_+_RoPE_Scaling_full_example.ipynb +4 -11
- ChatML_+_chat_templates_+_Mistral_7b_full_example.ipynb +4 -11
- DPO_Zephyr_Unsloth_Example.ipynb +4 -11
- Mistral_7b_Text_Completion_Raw_Text_training_full_example(1).ipynb +0 -0
- Mistral_7b_Text_Completion_Raw_Text_training_full_example.ipynb +4 -11
Alpaca_+_Codellama_34b_full_example.ipynb
CHANGED
@@ -27,17 +27,10 @@
   "outputs": [],
   "source": [
    "%%capture\n",
-   "import torch\n",
-   "major_version, minor_version = torch.cuda.get_device_capability()\n",
-   "# Must install separately since Colab has torch 2.2.1, which breaks packages\n",
-   "!pip install \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n",
-   "if major_version >= 8:\n",
-   "    # Use this for new GPUs like Ampere, Hopper GPUs (RTX 30xx, RTX 40xx, A100, H100, L40)\n",
-   "    !pip install --no-deps packaging ninja einops flash-attn xformers trl peft accelerate bitsandbytes\n",
-   "else:\n",
-   "    # Use this for older GPUs (V100, Tesla T4, RTX 20xx)\n",
-   "    !pip install --no-deps xformers trl peft accelerate bitsandbytes\n",
-   "pass"
+   "# Installs Unsloth, Xformers (Flash Attention) and all other packages!\n",
+   "!pip install unsloth\n",
+   "# Get latest Unsloth\n",
+   "!pip install --upgrade --no-deps \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n"
   ]
  },
  {
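For context, the removed cell branched on the GPU's CUDA compute capability, installing flash-attn only for the newer generations that its comments name (Ampere and Hopper). A minimal sketch of that check, assuming a CUDA-enabled torch in the Colab runtime; the >= 8 threshold and the fallback to xformers are taken from the removed code itself, and the print messages are illustrative only:

import torch

# Compute capability 8.0+ means Ampere or newer (RTX 30xx/40xx, A100, H100, L40);
# T4, V100 and RTX 20xx report a major version below 8.
major_version, minor_version = torch.cuda.get_device_capability()
if major_version >= 8:
    print("Newer GPU: flash-attn build is supported")   # illustrative message
else:
    print("Older GPU: fall back to xformers attention")  # illustrative message

The replacement cell drops this branching entirely: per its own comment, a plain `pip install unsloth` now brings in Unsloth, Xformers (Flash Attention) and all other packages in one step.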
Alpaca_+_Gemma_7b_full_example.ipynb
CHANGED
@@ -29,17 +29,10 @@
   "outputs": [],
   "source": [
    "%%capture\n",
-   "import torch\n",
-   "major_version, minor_version = torch.cuda.get_device_capability()\n",
-   "# Must install separately since Colab has torch 2.2.1, which breaks packages\n",
-   "!pip install \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n",
-   "if major_version >= 8:\n",
-   "    # Use this for new GPUs like Ampere, Hopper GPUs (RTX 30xx, RTX 40xx, A100, H100, L40)\n",
-   "    !pip install --no-deps packaging ninja einops flash-attn xformers trl peft accelerate bitsandbytes\n",
-   "else:\n",
-   "    # Use this for older GPUs (V100, Tesla T4, RTX 20xx)\n",
-   "    !pip install --no-deps xformers trl peft accelerate bitsandbytes\n",
-   "pass"
+   "# Installs Unsloth, Xformers (Flash Attention) and all other packages!\n",
+   "!pip install unsloth\n",
+   "# Get latest Unsloth\n",
+   "!pip install --upgrade --no-deps \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n"
   ]
  },
  {
Alpaca_+_Llama_7b_full_example.ipynb
CHANGED
@@ -27,17 +27,10 @@
   "outputs": [],
   "source": [
    "%%capture\n",
-   "import torch\n",
-   "major_version, minor_version = torch.cuda.get_device_capability()\n",
-   "# Must install separately since Colab has torch 2.2.1, which breaks packages\n",
-   "!pip install \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n",
-   "if major_version >= 8:\n",
-   "    # Use this for new GPUs like Ampere, Hopper GPUs (RTX 30xx, RTX 40xx, A100, H100, L40)\n",
-   "    !pip install --no-deps packaging ninja einops flash-attn xformers trl peft accelerate bitsandbytes\n",
-   "else:\n",
-   "    # Use this for older GPUs (V100, Tesla T4, RTX 20xx)\n",
-   "    !pip install --no-deps xformers trl peft accelerate bitsandbytes\n",
-   "pass"
+   "# Installs Unsloth, Xformers (Flash Attention) and all other packages!\n",
+   "!pip install unsloth\n",
+   "# Get latest Unsloth\n",
+   "!pip install --upgrade --no-deps \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n"
   ]
  },
  {
Alpaca_+_Mistral_7b_full_example.ipynb
CHANGED
@@ -27,17 +27,10 @@
   "outputs": [],
   "source": [
    "%%capture\n",
-   "import torch\n",
-   "major_version, minor_version = torch.cuda.get_device_capability()\n",
-   "# Must install separately since Colab has torch 2.2.1, which breaks packages\n",
-   "!pip install \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n",
-   "if major_version >= 8:\n",
-   "    # Use this for new GPUs like Ampere, Hopper GPUs (RTX 30xx, RTX 40xx, A100, H100, L40)\n",
-   "    !pip install --no-deps packaging ninja einops flash-attn xformers trl peft accelerate bitsandbytes\n",
-   "else:\n",
-   "    # Use this for older GPUs (V100, Tesla T4, RTX 20xx)\n",
-   "    !pip install --no-deps xformers trl peft accelerate bitsandbytes\n",
-   "pass"
+   "# Installs Unsloth, Xformers (Flash Attention) and all other packages!\n",
+   "!pip install unsloth\n",
+   "# Get latest Unsloth\n",
+   "!pip install --upgrade --no-deps \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n"
   ]
  },
  {
Alpaca_+_TinyLlama_+_RoPE_Scaling_full_example.ipynb
CHANGED
@@ -29,17 +29,10 @@
   "outputs": [],
   "source": [
    "%%capture\n",
-   "import torch\n",
-   "major_version, minor_version = torch.cuda.get_device_capability()\n",
-   "# Must install separately since Colab has torch 2.2.1, which breaks packages\n",
-   "!pip install \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n",
-   "if major_version >= 8:\n",
-   "    # Use this for new GPUs like Ampere, Hopper GPUs (RTX 30xx, RTX 40xx, A100, H100, L40)\n",
-   "    !pip install --no-deps packaging ninja einops flash-attn xformers trl peft accelerate bitsandbytes\n",
-   "else:\n",
-   "    # Use this for older GPUs (V100, Tesla T4, RTX 20xx)\n",
-   "    !pip install --no-deps xformers trl peft accelerate bitsandbytes\n",
-   "pass"
+   "# Installs Unsloth, Xformers (Flash Attention) and all other packages!\n",
+   "!pip install unsloth\n",
+   "# Get latest Unsloth\n",
+   "!pip install --upgrade --no-deps \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n"
   ]
  },
  {
ChatML_+_chat_templates_+_Mistral_7b_full_example.ipynb
CHANGED
@@ -29,17 +29,10 @@
   "outputs": [],
   "source": [
    "%%capture\n",
-   "import torch\n",
-   "major_version, minor_version = torch.cuda.get_device_capability()\n",
-   "# Must install separately since Colab has torch 2.2.1, which breaks packages\n",
-   "!pip install \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n",
-   "if major_version >= 8:\n",
-   "    # Use this for new GPUs like Ampere, Hopper GPUs (RTX 30xx, RTX 40xx, A100, H100, L40)\n",
-   "    !pip install --no-deps packaging ninja einops flash-attn xformers trl peft accelerate bitsandbytes\n",
-   "else:\n",
-   "    # Use this for older GPUs (V100, Tesla T4, RTX 20xx)\n",
-   "    !pip install --no-deps xformers trl peft accelerate bitsandbytes\n",
-   "pass"
+   "# Installs Unsloth, Xformers (Flash Attention) and all other packages!\n",
+   "!pip install unsloth\n",
+   "# Get latest Unsloth\n",
+   "!pip install --upgrade --no-deps \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n"
   ]
  },
  {
DPO_Zephyr_Unsloth_Example.ipynb
CHANGED
@@ -28,17 +28,10 @@
   "outputs": [],
   "source": [
    "%%capture\n",
-   "import torch\n",
-   "major_version, minor_version = torch.cuda.get_device_capability()\n",
-   "# Must install separately since Colab has torch 2.2.1, which breaks packages\n",
-   "!pip install \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n",
-   "if major_version >= 8:\n",
-   "    # Use this for new GPUs like Ampere, Hopper GPUs (RTX 30xx, RTX 40xx, A100, H100, L40)\n",
-   "    !pip install --no-deps packaging ninja einops flash-attn xformers trl peft accelerate bitsandbytes\n",
-   "else:\n",
-   "    # Use this for older GPUs (V100, Tesla T4, RTX 20xx)\n",
-   "    !pip install --no-deps xformers trl peft accelerate bitsandbytes\n",
-   "pass"
+   "# Installs Unsloth, Xformers (Flash Attention) and all other packages!\n",
+   "!pip install unsloth\n",
+   "# Get latest Unsloth\n",
+   "!pip install --upgrade --no-deps \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n"
   ]
  },
  {
Mistral_7b_Text_Completion_Raw_Text_training_full_example(1).ipynb
ADDED
The diff for this file is too large to render. See raw diff.
Mistral_7b_Text_Completion_Raw_Text_training_full_example.ipynb
CHANGED
@@ -39,17 +39,10 @@
   "outputs": [],
   "source": [
    "%%capture\n",
-   "import torch\n",
-   "major_version, minor_version = torch.cuda.get_device_capability()\n",
-   "# Must install separately since Colab has torch 2.2.1, which breaks packages\n",
-   "!pip install \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n",
-   "if major_version >= 8:\n",
-   "    # Use this for new GPUs like Ampere, Hopper GPUs (RTX 30xx, RTX 40xx, A100, H100, L40)\n",
-   "    !pip install --no-deps packaging ninja einops flash-attn xformers trl peft accelerate bitsandbytes\n",
-   "else:\n",
-   "    # Use this for older GPUs (V100, Tesla T4, RTX 20xx)\n",
-   "    !pip install --no-deps xformers trl peft accelerate bitsandbytes\n",
-   "pass"
+   "# Installs Unsloth, Xformers (Flash Attention) and all other packages!\n",
+   "!pip install unsloth\n",
+   "# Get latest Unsloth\n",
+   "!pip install --upgrade --no-deps \"unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git\"\n"
   ]
  },
  {
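Unescaped from the notebook JSON, the install cell that every notebook in this commit now shares reads as follows (Colab cell magic and shell escapes included):

%%capture
# Installs Unsloth, Xformers (Flash Attention) and all other packages!
!pip install unsloth
# Get latest Unsloth
!pip install --upgrade --no-deps "unsloth[colab-new] @ git+https://github.com/unslothai/unsloth.git"

%%capture suppresses pip's output in the notebook, and --no-deps on the second install keeps the GitHub checkout from disturbing the dependency set the first install already resolved.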