cryptonaut committed on
Commit
35fb65d
·
verified ·
1 Parent(s): b94156c

Upload 2 files

Browse files

Hugging Face Transformers "Echo" models patch for Windows and Linux

Files changed (2) hide show
  1. transformers+echo.ps1 +147 -0
  2. transformers+echo.sh +103 -0
transformers+echo.ps1 ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#
# This PowerShell script installs the Hugging Face transformers library
# and patches it to include the 'Echo' model architecture from
# MythWorxAI. This allows you to load the model without needing
# the `trust_remote_code=True` flag.
#
# The script will:
# 1. Ensure dependencies like Python, pip, and web access are available.
# 2. Install the `transformers`, `torch`, and `accelerate` libraries.
# 3. Locate the installed `transformers` library path using a robust method.
# 4. Create a new directory for the 'echo' model inside the library.
# 5. Download the necessary model definition files from the Hugging Face Hub.
# 6. Create an __init__.py file to make the new model a proper module.
#

# Stop the script if any command fails
$ErrorActionPreference = "Stop"

# --- Helper Functions ---
# Resolve a command name to its full executable path; returns $null when absent.
function Get-CommandPath($command) {
    return (Get-Command $command -ErrorAction SilentlyContinue).Source
}

# --- Dependency Checks ---
Write-Host "▶ Checking for required tools (python, pip)..."

$pythonCmdPath = Get-CommandPath "python"
if ([string]::IsNullOrWhiteSpace($pythonCmdPath)) {
    Write-Host "❌ Error: Python is not installed or not in your PATH. Please install Python 3 and try again." -ForegroundColor Red
    exit 1
}

$pipCmdPath = Get-CommandPath "pip"
if ([string]::IsNullOrWhiteSpace($pipCmdPath)) {
    Write-Host "❌ Error: pip is not installed or not in your PATH. Please install pip and try again." -ForegroundColor Red
    exit 1
}

Write-Host "✅ All tools are available." -ForegroundColor Green

# --- Installation ---
Write-Host "`n▶ Installing Hugging Face libraries (transformers, torch, accelerate)..."
# Use '&' to execute the command stored in the variable
& $pipCmdPath install transformers torch accelerate --quiet
Write-Host "✅ Libraries installed successfully." -ForegroundColor Green

# --- Patching ---
Write-Host "`n▶ Locating transformers installation..."

# Use `pip show` to robustly find the package location.
# FIX: Where-Object may return $null (no 'Location:' line) or an array
# (multiple matches); the original indexed into -split unconditionally and
# would throw on $null. Wrap in @(...) and take only the first match.
$pipShowOutput = & $pipCmdPath show transformers
$locationLine = @($pipShowOutput | Where-Object { $_ -match '^Location:' }) | Select-Object -First 1

if (-not $locationLine) {
    Write-Host "❌ Error: Could not determine transformers library location via 'pip show'." -ForegroundColor Red
    exit 1
}

$sitePackagesPath = ($locationLine -replace '^Location:\s*', '').Trim()

if ([string]::IsNullOrWhiteSpace($sitePackagesPath)) {
    Write-Host "❌ Error: Could not determine transformers library location via 'pip show'." -ForegroundColor Red
    exit 1
}

# The final path is the site-packages location + the 'transformers' directory name.
$transformersPath = Join-Path -Path $sitePackagesPath -ChildPath "transformers"

if (-not (Test-Path -Path $transformersPath -PathType Container)) {
    Write-Host "❌ Error: The transformers directory was not found at the expected path: $transformersPath" -ForegroundColor Red
    exit 1
}

Write-Host "✅ Found transformers at: $transformersPath" -ForegroundColor Green

# FIX: build the model path one segment at a time instead of embedding a
# literal backslash ("models\echo"), so the script also works under
# PowerShell Core on Linux/macOS where '\' is not a path separator.
$modelPath = Join-Path -Path (Join-Path -Path $transformersPath -ChildPath "models") -ChildPath "echo"

# Check if the directory already exists
if (Test-Path -Path $modelPath -PathType Container) {
    Write-Host "✅ Patch directory '$modelPath' already exists. No action needed." -ForegroundColor Yellow
    Write-Host "`n🎉 Patching complete! You can now use 'Echo' models." -ForegroundColor Cyan
    exit 0
}

Write-Host "`n▶ Applying patch: Creating 'echo' model directory..."
# -Force ensures parent directories are created if they don't exist
New-Item -Path $modelPath -ItemType Directory -Force | Out-Null
Write-Host "✅ Directory created." -ForegroundColor Green

Write-Host "▶ Downloading model architecture files..."
$configUrl = "https://huggingface.co/MythWorxAI/Echo-mini/raw/main/configuration_echo.py"
$modelingUrl = "https://huggingface.co/MythWorxAI/Echo-mini/raw/main/modeling_echo.py"

$configOutFile = Join-Path -Path $modelPath -ChildPath "configuration_echo.py"
$modelingOutFile = Join-Path -Path $modelPath -ChildPath "modeling_echo.py"

# Define a User-Agent to mimic a browser
$userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36"

# Ensure modern TLS security protocol is used
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12

try {
    Write-Host "Downloading from: $configUrl"
    Invoke-WebRequest -Uri $configUrl -OutFile $configOutFile -UserAgent $userAgent -UseBasicParsing

    Write-Host "Downloading from: $modelingUrl"
    Invoke-WebRequest -Uri $modelingUrl -OutFile $modelingOutFile -UserAgent $userAgent -UseBasicParsing

    Write-Host "✅ Model files downloaded." -ForegroundColor Green
}
catch {
    # Provide more detailed error information
    $errorMessage = "❌ Error downloading files. "
    if ($_.Exception.Response) {
        $statusCode = [int]$_.Exception.Response.StatusCode
        $errorMessage += "Status Code: $statusCode. "
        $statusDescription = $_.Exception.Response.StatusDescription
        $errorMessage += "Description: $statusDescription."
    } else {
        $errorMessage += "Details: $($_.Exception.Message)"
    }
    Write-Host $errorMessage -ForegroundColor Red
    exit 1
}

Write-Host "▶ Finalizing module structure..."
# Create an __init__.py to make `echo` a recognizable Python module
$initFile = Join-Path -Path $modelPath -ChildPath "__init__.py"
New-Item -Path $initFile -ItemType File -Force | Out-Null
Write-Host "✅ Module created." -ForegroundColor Green

# --- Completion ---
Write-Host "`n🎉 Patching complete! The 'transformers' library now natively supports 'echo' models." -ForegroundColor Cyan
Write-Host "   You can now load 'MythWorxAI/Echo-mini' without 'trust_remote_code=True'."

# --- Test Code ---
Write-Host "`n🧪 To test the installation, run the following Python code:" -ForegroundColor Yellow
Write-Host @"

from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = 'MythWorxAI/Echo-mini'
print(f"Loading model: {model_id}")

# This now works without trust_remote_code=True
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

print('✅ Model and tokenizer loaded successfully!')
print(model.config)
"@
transformers+echo.sh ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/bin/bash
#
# This script installs the Hugging Face transformers library
# and patches it to include the 'Echo' model architecture from
# MythWorxAI. This allows you to load the model without needing
# the `trust_remote_code=True` flag.
#
# The script will:
# 1. Ensure dependencies like python, pip, and curl are available.
# 2. Install the `transformers`, `torch`, and `accelerate` libraries.
# 3. Locate the installed `transformers` library path.
# 4. Create a new directory for the 'echo' model inside the library.
# 5. Download the necessary model definition files from the Hugging Face Hub.
# 6. Create an __init__.py file to make the new model a proper module.
#

# Exit on command failure, on use of an unset variable, and on any failing
# stage of a pipeline.
set -euo pipefail

# --- Helper Functions ---
# Return success if the named command is available on PATH.
command_exists() {
  command -v "$1" >/dev/null 2>&1
}

# --- Dependency Checks ---
echo "▶ Checking for required tools (python, pip, curl)..."
# Prefer python3; fall back to python only when python3 is absent.
if command_exists python3; then
  PYTHON_CMD="python3"
elif command_exists python; then
  PYTHON_CMD="python"
else
  echo "❌ Error: Python is not installed. Please install Python 3 and try again."
  exit 1
fi

# FIX: invoke pip through the selected interpreter ("$PYTHON_CMD" -m pip)
# rather than a standalone `pip`/`pip3` binary, which may belong to a
# DIFFERENT Python installation than the one used below to locate the
# transformers path — that mismatch would patch the wrong site-packages.
if ! "$PYTHON_CMD" -m pip --version >/dev/null 2>&1; then
  echo "❌ Error: pip is not installed. Please install pip for Python 3 and try again."
  exit 1
fi

if ! command_exists curl; then
  echo "❌ Error: curl is not installed. Please install curl and try again."
  exit 1
fi
echo "✅ All tools are available."

# --- Installation ---
echo -e "\n▶ Installing Hugging Face libraries (transformers, torch, accelerate)..."
"$PYTHON_CMD" -m pip install transformers torch accelerate --quiet
echo "✅ Libraries installed successfully."

# --- Patching ---
echo -e "\n▶ Locating transformers installation..."
TRANSFORMERS_PATH=$("$PYTHON_CMD" -c "import transformers, os; print(os.path.dirname(transformers.__file__))")

if [ -z "$TRANSFORMERS_PATH" ]; then
  echo "❌ Error: Could not find the transformers library installation path."
  exit 1
fi
echo "✅ Found transformers at: $TRANSFORMERS_PATH"

MODEL_PATH="$TRANSFORMERS_PATH/models/echo"

if [ -d "$MODEL_PATH" ]; then
  echo "✅ Patch directory '$MODEL_PATH' already exists. No action needed."
  echo -e "\n🎉 Patching complete! You can now use 'Echo' models."
  exit 0
fi

echo -e "\n▶ Applying patch: Creating 'echo' model directory..."
mkdir -p "$MODEL_PATH"

echo "▶ Downloading model architecture files..."
CONFIG_URL="https://huggingface.co/MythWorxAI/Echo-mini/raw/main/configuration_echo.py"
MODELING_URL="https://huggingface.co/MythWorxAI/Echo-mini/raw/main/modeling_echo.py"

# -f makes curl fail (non-zero exit) on HTTP errors; -L follows redirects.
curl -fL "$CONFIG_URL" -o "$MODEL_PATH/configuration_echo.py"
curl -fL "$MODELING_URL" -o "$MODEL_PATH/modeling_echo.py"

echo "✅ Model files downloaded."

echo "▶ Finalizing module structure..."
# Create an __init__.py to make `echo` a recognizable Python module
# This is crucial for the relative imports inside the model files to work.
touch "$MODEL_PATH/__init__.py"
echo "✅ Module created."

# --- Completion ---
echo -e "\n🎉 Patching complete! The 'transformers' library now natively supports 'echo' models."
echo "   You can now load 'MythWorxAI/Echo-mini' without 'trust_remote_code=True'."

echo -e "\n🧪 To test the installation, run the following Python code:"
# Quoted here-doc delimiter: emit the snippet literally, with no shell
# expansion (replaces the original run of per-line echo statements).
cat <<'EOF'

from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = 'MythWorxAI/Echo-mini'
print(f"Loading model: {model_id}")

# This now works without trust_remote_code=True
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

print('✅ Model and tokenizer loaded successfully!')
print(model.config)
EOF