Gaurav vashistha committed on
Commit
14589fa
·
1 Parent(s): ff0e767

Add N8N Integration

Browse files
.gitignore CHANGED
@@ -19,3 +19,5 @@ Thumbs.db
19
  # Editor Directories
20
  .vscode/
21
  .idea/
 
 
 
19
  # Editor Directories
20
  .vscode/
21
  .idea/
22
+
23
+ *.jpg
add_license.py ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import subprocess
import sys

def run_command(command):
    """Echo and run a shell command, aborting the script on failure.

    shell=True is often required on Windows for some commands/environments;
    the commands passed below are hard-coded, so there is no injection risk.
    """
    try:
        print(f"Running: {command}")
        result = subprocess.run(command, check=True, shell=True, capture_output=True, text=True)
        print(result.stdout)
    except subprocess.CalledProcessError as e:
        print(f"Error running: {command}")
        print(e.stderr)
        # add -> commit -> push is a strict sequence, so stop at the first failure.
        # Use sys.exit instead of the interactive-only `exit` builtin, which is
        # injected by the `site` module and not guaranteed to exist.
        sys.exit(1)

def main():
    """Create an MIT LICENSE file and push it to the Hugging Face Space remote."""
    license_text = """MIT License

Copyright (c) 2025 Bhishaj

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

    file_path = "LICENSE"

    print(f"Creating {file_path}...")
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(license_text)
    print(f"{file_path} created successfully.")

    print("Running git commands...")

    # 1. git add LICENSE
    run_command("git add LICENSE")

    # 2. git commit -m 'Add MIT License'
    run_command("git commit -m \"Add MIT License\"")

    # 3. git push space clean_deploy:main
    print("Pushing to Hugging Face Space (this might take a few seconds)...")
    run_command("git push space clean_deploy:main")

    print("Done! License added and pushed to Hugging Face Space.")

if __name__ == "__main__":
    main()
connect_n8n.py ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import subprocess

def update_requirements():
    """Ensure httpx is listed in requirements.txt, creating the file if needed."""
    req_file = "requirements.txt"
    if not os.path.exists(req_file):
        with open(req_file, "w") as f:
            f.write("httpx\n")
        print(f"Created {req_file} with httpx.")
        return

    with open(req_file, "r") as f:
        content = f.read()

    # Match whole requirement names per line: a plain `"httpx" in content`
    # substring test is fooled by entries like "httpx-mock" or "some-httpx-lib".
    present = any(
        line.strip().split("==")[0].strip().lower() == "httpx"
        for line in content.splitlines()
    )
    if not present:
        with open(req_file, "a") as f:
            f.write("\nhttpx\n")
        print("Appended httpx to requirements.txt.")
    else:
        print("httpx already in requirements.txt.")

def update_main():
    """Overwrite main.py with the N8N-enabled orchestration pipeline."""
    # Full replacement content for main.py (raw string: written verbatim).
    main_content = r'''import os
import httpx
import asyncio
from fastapi import FastAPI, UploadFile, File
from fastapi.responses import HTMLResponse, JSONResponse
from dotenv import load_dotenv
# Import Agents
from agents.visual_analyst import VisualAnalyst
from agents.memory_agent import MemoryAgent
from agents.writer_agent import WriterAgent
load_dotenv()
app = FastAPI()
# Initialize Agents
try:
    visual_agent = VisualAnalyst()
    memory_agent = MemoryAgent()
    writer_agent = WriterAgent()
    memory_agent.seed_database()
    print("✅ All Agents Online")
except Exception as e:
    print(f"⚠️ Agent Startup Warning: {e}")
@app.get("/", response_class=HTMLResponse)
async def read_root():
    try:
        with open("dashboard.html", "r") as f:
            return f.read()
    except FileNotFoundError:
        return "<h1>Error: dashboard.html not found</h1>"
@app.post("/generate-catalog")
async def generate_catalog(file: UploadFile = File(...)):
    try:
        # 1. Save Temp File
        os.makedirs("uploads", exist_ok=True)
        file_path = f"uploads/{file.filename}"
        with open(file_path, "wb") as f:
            f.write(await file.read())
        # 2. Run AI Pipeline
        visual_data = await visual_agent.analyze_image(file_path)

        query = f"{visual_data.get('main_color', '')} {visual_data.get('product_type', 'product')}"
        seo_keywords = memory_agent.retrieve_keywords(query)

        listing = writer_agent.write_listing(visual_data, seo_keywords)

        # 3. Construct Final Payload
        final_data = {
            "visual_data": visual_data,
            "seo_keywords": seo_keywords,
            "listing": listing
        }
        # 4. ⚡ N8N AUTOMATION TRIGGER ⚡
        n8n_url = os.getenv("N8N_WEBHOOK_URL")
        if n8n_url:
            print(f"🚀 Sending data to N8N: {n8n_url}")
            # Fire and forget (don't make the user wait for n8n)
            asyncio.create_task(send_to_n8n(n8n_url, final_data))

        # Cleanup
        if os.path.exists(file_path):
            os.remove(file_path)

        return JSONResponse(content=final_data)
    except Exception as e:
        return JSONResponse(content={"error": str(e)}, status_code=500)
# Async Helper to send data without blocking
async def send_to_n8n(url, data):
    try:
        async with httpx.AsyncClient() as client:
            await client.post(url, json=data, timeout=5.0)
        print("✅ N8N Webhook Sent Successfully")
    except Exception as e:
        print(f"❌ N8N Webhook Failed: {e}")
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)
'''
    with open("main.py", "w", encoding="utf-8") as f:
        f.write(main_content)
    print("Updated main.py with N8N integration logic.")

def deploy():
    """Stage, commit (only when there are changes) and push to the Space remote."""
    try:
        subprocess.run(["git", "add", "."], check=True)
        # Only commit when the working tree actually changed; an empty commit
        # would make `git commit` fail and abort the push.
        status = subprocess.run(["git", "status", "--porcelain"], capture_output=True, text=True)
        if status.stdout.strip():
            subprocess.run(["git", "commit", "-m", "Add N8N Integration"], check=True)
            print("Git commit successful.")
        else:
            print("No changes to commit.")

        print("Pushing to space...")
        subprocess.run(["git", "push", "space", "clean_deploy:main"], check=True)
        print("✅ Successfully deployed to Hugging Face Space.")

    except subprocess.CalledProcessError as e:
        print(f"❌ Deployment failed: {e}")

if __name__ == "__main__":
    print("Starting N8N Integration Setup...")
    update_requirements()
    update_main()
    deploy()
    print("✅ connect_n8n.py completed.")
create_dockerfile.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import subprocess
import sys

def run_command(command):
    """Echo and execute a shell command; abort the whole script if it fails."""
    print(f"Running: {command}")
    try:
        # shell=True allows us to run the command string exactly as provided
        subprocess.run(command, shell=True, check=True)
    except subprocess.CalledProcessError as e:
        print(f"Error executing command '{command}': {e}")
        sys.exit(1)

def main():
    """Write the Dockerfile, then commit and force-push it to the Space remote."""
    # 1. Create Dockerfile
    dockerfile_content = """FROM python:3.9
WORKDIR /code
COPY ./requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
COPY . /code
# Fix permissions for libraries that write to home
RUN mkdir -p /tmp/home
ENV HOME=/tmp/home
# Start the FastAPI server on port 7860 (required by Hugging Face)
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
"""

    print("Creating Dockerfile...")
    try:
        with open("Dockerfile", "w", newline='\n') as f:
            f.write(dockerfile_content)
    except Exception as e:
        print(f"Failed to create Dockerfile: {e}")
        sys.exit(1)
    else:
        print("Dockerfile created successfully.")

    # 2. Push to Space
    print("Executing Git commands...")
    for step in (
        'git add Dockerfile',
        'git commit -m "Add Dockerfile for Hugging Face deployment"',
        'git push -f space clean_deploy:main',
    ):
        run_command(step)

    print("\ncreate_dockerfile.py execution completed.")

if __name__ == "__main__":
    main()
final_deploy_push.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import subprocess
import sys

# Force UTF-8 output for Windows terminals
sys.stdout.reconfigure(encoding='utf-8')

def deploy():
    """Force-push the local clean_deploy branch to the Space remote's main."""
    print("⚠️ Ensure you are inside the D:\\Projects\\MerchFlow AI directory before running this!")

    command = "git push --force space clean_deploy:main"
    print(f"\nRunning: {command} ...")

    try:
        subprocess.run(command, check=True, shell=True)
    except subprocess.CalledProcessError as e:
        print(f"\n❌ Push failed: {e}")
    else:
        print("\n✅ Successfully pushed to Space!")

if __name__ == "__main__":
    deploy()
fix_dashboard_routing.py ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import subprocess

def main():
    """Overwrite main.py so the dashboard is served at "/" (fixing a 404),
    then commit and push the change to the Hugging Face Space remote.

    NOTE(review): the replacement content below drops the memory/writer
    agents and only wires up VisualAnalyst + /analyze -- confirm this
    simplification is intentional before reusing it.
    """
    # Full replacement content for main.py, written verbatim to disk.
    main_py_content = """import os
from fastapi import FastAPI, UploadFile, File, HTTPException
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from agents.visual_analyst import VisualAnalyst
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
app = FastAPI()
# Initialize Agent
visual_agent = VisualAnalyst()
# 1. READ THE DASHBOARD HTML FILE INTO MEMORY
try:
    with open("dashboard.html", "r") as f:
        dashboard_html = f.read()
except FileNotFoundError:
    dashboard_html = "<h1>Error: dashboard.html not found. Please ensure the file exists.</h1>"
# 2. SERVE DASHBOARD AT ROOT (Home Page)
@app.get("/", response_class=HTMLResponse)
async def read_root():
    return dashboard_html
# 3. KEEP /dashboard ROUTE AS BACKUP
@app.get("/dashboard", response_class=HTMLResponse)
async def read_dashboard():
    return dashboard_html
@app.post("/analyze")
async def analyze_merch(file: UploadFile = File(...)):
    try:
        os.makedirs("uploads", exist_ok=True)
        file_path = f"uploads/{file.filename}"
        with open(file_path, "wb") as f:
            f.write(await file.read())
        result = await visual_agent.analyze_image(file_path)

        if os.path.exists(file_path):
            os.remove(file_path)

        return JSONResponse(content=result)
    except Exception as e:
        return JSONResponse(content={"error": str(e)}, status_code=500)
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)
"""

    # Overwrite main.py; bail out early if the write fails (no point pushing).
    print("Overwriting main.py...")
    try:
        with open("main.py", "w", encoding="utf-8") as f:
            f.write(main_py_content)
        print("Successfully updated main.py")
    except Exception as e:
        print(f"Error writing main.py: {e}")
        return

    # Define git commands
    git_commands = [
        ["git", "add", "main.py"],
        ["git", "commit", "-m", "Fix dashboard 404 by serving HTML at root"],
        ["git", "push", "space", "clean_deploy:main"]
    ]

    # Run git commands
    print("\nRunning git commands...")
    for cmd in git_commands:
        print(f"Executing: {' '.join(cmd)}")
        try:
            subprocess.run(cmd, check=True)
        except subprocess.CalledProcessError as e:
            print(f"Command failed: {e}")
            # Keep going deliberately: a failed commit (e.g. "nothing to
            # commit") should not prevent the subsequent push attempt.
            pass

    print("\nfix_dashboard_routing.py completed.")

if __name__ == "__main__":
    main()
fix_google_key.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import sys

# Force UTF-8 output for Windows terminals
sys.stdout.reconfigure(encoding='utf-8')

def set_env_key(env_path, key, value):
    """Insert or replace the `key=value` line in the .env file at env_path.

    Preserves all other lines; appends the entry if the key is absent.
    """
    lines = []
    if os.path.exists(env_path):
        with open(env_path, "r", encoding="utf-8") as f:
            lines = f.readlines()

    found = False
    new_lines = []
    for line in lines:
        if line.startswith(f"{key}="):
            new_lines.append(f"{key}={value}\n")
            found = True
        else:
            new_lines.append(line)

    if not found:
        # Keep the file well-formed if the last existing line lacks a newline.
        if new_lines and not new_lines[-1].endswith('\n'):
            new_lines.append('\n')
        new_lines.append(f"{key}={value}\n")

    with open(env_path, "w", encoding="utf-8") as f:
        f.writelines(new_lines)

def main():
    """Update GOOGLE_API_KEY in .env and sync it to the Hugging Face Space."""
    env_path = ".env"
    key = "GOOGLE_API_KEY"

    # SECURITY: the previous version hard-coded a live Google API key in this
    # file, leaking the secret to anyone with repo access (and to the git
    # history). Take the key from the command line or an environment variable
    # instead -- the leaked key should also be revoked in the Google console.
    value = sys.argv[1] if len(sys.argv) > 1 else os.getenv("NEW_GOOGLE_API_KEY")
    if not value:
        print("❌ No key supplied. Pass it as an argument or set NEW_GOOGLE_API_KEY.")
        sys.exit(1)

    print(f"Updating {key} in .env...")
    set_env_key(env_path, key, value)
    print(f"✅ Updated {key} in .env")

    # 2. Upload to Cloud
    print("Syncing secrets to Hugging Face Space...")
    try:
        # Ensure the current directory is importable so upload_secrets resolves.
        sys.path.append(os.getcwd())
        from upload_secrets import upload_secrets

        upload_secrets()
        print("✅ Google Key saved locally and uploaded to Hugging Face!")
    except Exception as e:
        print(f"❌ Failed to sync: {e}")

if __name__ == "__main__":
    main()
fix_readme.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import sys
import subprocess

# Force UTF-8 output for Windows terminals
sys.stdout.reconfigure(encoding='utf-8')

# README with the YAML front matter Hugging Face Spaces requires.
readme_content = """---
title: MerchFlow AI
emoji: 🚀
colorFrom: blue
colorTo: indigo
sdk: docker
pinned: false
---
# MerchFlow AI
An AI-powered merchandising agent.
"""

def run_command(command):
    """Echo and run a shell command, reporting success or failure."""
    print(f"Running: {command}")
    try:
        subprocess.run(command, check=True, shell=True)
    except subprocess.CalledProcessError as e:
        print(f"❌ Error: {e}")
        # Don't exit, try to continue or let user see error
    else:
        print("✅ Success")

def fix_readme():
    """Write the Space-compatible README.md and push it to the remote."""
    print("Writing README.md...")
    with open("README.md", "w", encoding="utf-8") as fh:
        fh.write(readme_content)
    print("✅ Created README.md")

    print("Deploying changes...")
    for step in (
        "git add README.md",
        'git commit -m "Add Hugging Face configuration"',
        "git push space clean_deploy:main",
    ):
        run_command(step)
    print("✅ Configuration fixed and pushed!")

if __name__ == "__main__":
    fix_readme()
fix_vision_core.py ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import subprocess

def fix_vision_core():
    """Rewrite agents/visual_analyst.py with a Gemini-based implementation,
    then commit and push the file to the Hugging Face Space remote.

    NOTE(review): the generated file reads GEMINI_API_KEY, while
    upload_secrets.py syncs GOOGLE_API_KEY to the Space -- confirm which
    secret name is actually configured in the deployment.
    """
    # Full replacement content for agents/visual_analyst.py, written verbatim.
    content = """import os
import json
import asyncio
import google.generativeai as genai
from PIL import Image
from dotenv import load_dotenv

load_dotenv()

class VisualAnalyst:
    def __init__(self):
        api_key = os.getenv("GEMINI_API_KEY")
        if not api_key:
            print("⚠️ GEMINI_API_KEY missing")

        genai.configure(api_key=api_key)
        # Use the modern, faster Flash model
        self.model = genai.GenerativeModel('gemini-1.5-flash')

    async def analyze_image(self, image_path: str):
        print(f"👁️ Analyzing image: {image_path}")

        try:
            # 1. Load image properly with Pillow (Fixes format issues)
            img = Image.open(image_path)

            # 2. Define the prompt
            prompt = \\\"\\\"\\\"
            Analyze this product image for an e-commerce listing.
            Return ONLY a raw JSON object (no markdown formatting) with this structure:
            {
                "main_color": "string",
                "product_type": "string",
                "design_style": "string (minimalist, streetwear, vintage, etc)",
                "visual_features": ["list", "of", "visible", "features"],
                "suggested_title": "creative product title",
                "condition_guess": "new/used"
            }
            \\\"\\\"\\\"

            # 3. Run in a thread to prevent blocking (Sync to Async wrapper)
            response = await asyncio.to_thread(
                self.model.generate_content,
                [prompt, img]
            )

            # 4. Clean and Parse JSON
            text_response = response.text.replace('```json', '').replace('```', '').strip()
            return json.loads(text_response)
        except Exception as e:
            print(f"❌ Vision Error: {e}")
            # Return a Safe Fallback (Simulation)
            return {
                "main_color": "Unknown",
                "product_type": "Unidentified Item",
                "design_style": "Standard",
                "visual_features": ["Error analyzing image"],
                "suggested_title": "Manual Review Needed",
                "condition_guess": "New"
            }
"""
    # Write the file (create the agents/ package directory if missing).
    os.makedirs("agents", exist_ok=True)
    with open("agents/visual_analyst.py", "w", encoding="utf-8") as f:
        f.write(content)
    print("✅ agents/visual_analyst.py updated.")

    # Git operations
    print("🚀 Pushing to HuggingFace...")
    commands = [
        ["git", "add", "agents/visual_analyst.py"],
        ["git", "commit", "-m", "Fix vision core and error handling"],
        ["git", "push", "space", "clean_deploy:main"]
    ]

    for cmd in commands:
        try:
            print(f"Running: {' '.join(cmd)}")
            subprocess.run(cmd, check=True)
        except subprocess.CalledProcessError as e:
            print(f"⚠️ Command failed: {e}")
            # Continue even if commit fails (e.g. prompt already applied)

if __name__ == "__main__":
    fix_vision_core()
main.py CHANGED
@@ -1,72 +1,75 @@
1
  import os
 
 
2
  from fastapi import FastAPI, UploadFile, File
3
  from fastapi.responses import HTMLResponse, JSONResponse
4
  from dotenv import load_dotenv
5
-
6
  # Import Agents
7
  from agents.visual_analyst import VisualAnalyst
8
  from agents.memory_agent import MemoryAgent
9
  from agents.writer_agent import WriterAgent
10
-
11
  load_dotenv()
12
  app = FastAPI()
13
-
14
- # Initialize All Agents
15
  try:
16
  visual_agent = VisualAnalyst()
17
  memory_agent = MemoryAgent()
18
  writer_agent = WriterAgent()
19
-
20
- # Seed memory on startup
21
  memory_agent.seed_database()
22
  print("✅ All Agents Online")
23
  except Exception as e:
24
- print(f"⚠️ Warning: Some agents failed to load: {e}")
25
-
26
- # 1. SERVE DASHBOARD AT ROOT
27
  @app.get("/", response_class=HTMLResponse)
28
  async def read_root():
29
  try:
30
  with open("dashboard.html", "r") as f:
31
  return f.read()
32
  except FileNotFoundError:
33
- return "Error: dashboard.html not found"
34
-
35
- # 2. THE MAIN ORCHESTRATOR ENDPOINT
36
  @app.post("/generate-catalog")
37
  async def generate_catalog(file: UploadFile = File(...)):
38
  try:
39
- # A. Save Temp File
40
  os.makedirs("uploads", exist_ok=True)
41
  file_path = f"uploads/{file.filename}"
42
  with open(file_path, "wb") as f:
43
  f.write(await file.read())
44
-
45
- # B. Visual Analysis (The Eyes)
46
  visual_data = await visual_agent.analyze_image(file_path)
47
 
48
- # C. Memory Search (The Context)
49
- # Create a search query from visual data
50
  query = f"{visual_data.get('main_color', '')} {visual_data.get('product_type', 'product')}"
51
  seo_keywords = memory_agent.retrieve_keywords(query)
52
 
53
- # D. Write Copy (The Brain)
54
  listing = writer_agent.write_listing(visual_data, seo_keywords)
55
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56
  # Cleanup
57
  if os.path.exists(file_path):
58
  os.remove(file_path)
59
 
60
- # Return Full Data Structure
61
- return JSONResponse(content={
62
- "visual_data": visual_data,
63
- "seo_keywords": seo_keywords,
64
- "listing": listing
65
- })
66
  except Exception as e:
67
- print(f"Error: {e}")
68
  return JSONResponse(content={"error": str(e)}, status_code=500)
69
-
 
 
 
 
 
 
 
70
  if __name__ == "__main__":
71
  import uvicorn
72
  uvicorn.run(app, host="0.0.0.0", port=7860)
 
1
import os
import httpx
import asyncio
from fastapi import FastAPI, UploadFile, File
from fastapi.responses import HTMLResponse, JSONResponse
from dotenv import load_dotenv
# Import Agents
from agents.visual_analyst import VisualAnalyst
from agents.memory_agent import MemoryAgent
from agents.writer_agent import WriterAgent
load_dotenv()
app = FastAPI()
# Initialize Agents
# A failed constructor is only warned about so the web server can still start;
# routes that use a missing agent will then raise at request time.
try:
    visual_agent = VisualAnalyst()
    memory_agent = MemoryAgent()
    writer_agent = WriterAgent()
    memory_agent.seed_database()
    print("✅ All Agents Online")
except Exception as e:
    print(f"⚠️ Agent Startup Warning: {e}")
@app.get("/", response_class=HTMLResponse)
async def read_root():
    # Serve the dashboard at the site root; re-read from disk on every request.
    try:
        with open("dashboard.html", "r") as f:
            return f.read()
    except FileNotFoundError:
        return "<h1>Error: dashboard.html not found</h1>"
@app.post("/generate-catalog")
async def generate_catalog(file: UploadFile = File(...)):
    # Orchestrates the full pipeline: save upload -> vision -> memory -> writer,
    # then optionally forwards the result to an N8N webhook.
    try:
        # 1. Save Temp File
        os.makedirs("uploads", exist_ok=True)
        file_path = f"uploads/{file.filename}"
        with open(file_path, "wb") as f:
            f.write(await file.read())
        # 2. Run AI Pipeline
        visual_data = await visual_agent.analyze_image(file_path)

        query = f"{visual_data.get('main_color', '')} {visual_data.get('product_type', 'product')}"
        seo_keywords = memory_agent.retrieve_keywords(query)

        listing = writer_agent.write_listing(visual_data, seo_keywords)

        # 3. Construct Final Payload
        final_data = {
            "visual_data": visual_data,
            "seo_keywords": seo_keywords,
            "listing": listing
        }
        # 4. ⚡ N8N AUTOMATION TRIGGER ⚡
        # Only fires when N8N_WEBHOOK_URL is configured in the environment.
        n8n_url = os.getenv("N8N_WEBHOOK_URL")
        if n8n_url:
            print(f"🚀 Sending data to N8N: {n8n_url}")
            # Fire and forget (don't make the user wait for n8n)
            asyncio.create_task(send_to_n8n(n8n_url, final_data))

        # Cleanup
        if os.path.exists(file_path):
            os.remove(file_path)

        return JSONResponse(content=final_data)
    except Exception as e:
        return JSONResponse(content={"error": str(e)}, status_code=500)
# Async Helper to send data without blocking
async def send_to_n8n(url, data):
    # Best-effort delivery: failures are logged but never surfaced to the caller.
    try:
        async with httpx.AsyncClient() as client:
            await client.post(url, json=data, timeout=5.0)
        print("✅ N8N Webhook Sent Successfully")
    except Exception as e:
        print(f"❌ N8N Webhook Failed: {e}")
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)
requirements.txt CHANGED
@@ -11,3 +11,5 @@ python-dotenv
11
  google-generativeai
12
  groq
13
  Pillow
 
 
 
11
  google-generativeai
12
  groq
13
  Pillow
14
+
15
+ httpx
restore_full_brain.py ADDED
@@ -0,0 +1,122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import subprocess

def restore_main():
    """Overwrite main.py with the full three-agent orchestration pipeline."""
    # Full replacement content for main.py, written verbatim to disk.
    content = """import os
from fastapi import FastAPI, UploadFile, File
from fastapi.responses import HTMLResponse, JSONResponse
from dotenv import load_dotenv

# Import Agents
from agents.visual_analyst import VisualAnalyst
from agents.memory_agent import MemoryAgent
from agents.writer_agent import WriterAgent

load_dotenv()
app = FastAPI()

# Initialize All Agents
try:
    visual_agent = VisualAnalyst()
    memory_agent = MemoryAgent()
    writer_agent = WriterAgent()

    # Seed memory on startup
    memory_agent.seed_database()
    print("✅ All Agents Online")
except Exception as e:
    print(f"⚠️ Warning: Some agents failed to load: {e}")

# 1. SERVE DASHBOARD AT ROOT
@app.get("/", response_class=HTMLResponse)
async def read_root():
    try:
        with open("dashboard.html", "r") as f:
            return f.read()
    except FileNotFoundError:
        return "Error: dashboard.html not found"

# 2. THE MAIN ORCHESTRATOR ENDPOINT
@app.post("/generate-catalog")
async def generate_catalog(file: UploadFile = File(...)):
    try:
        # A. Save Temp File
        os.makedirs("uploads", exist_ok=True)
        file_path = f"uploads/{file.filename}"
        with open(file_path, "wb") as f:
            f.write(await file.read())

        # B. Visual Analysis (The Eyes)
        visual_data = await visual_agent.analyze_image(file_path)

        # C. Memory Search (The Context)
        # Create a search query from visual data
        query = f"{visual_data.get('main_color', '')} {visual_data.get('product_type', 'product')}"
        seo_keywords = memory_agent.retrieve_keywords(query)

        # D. Write Copy (The Brain)
        listing = writer_agent.write_listing(visual_data, seo_keywords)

        # Cleanup
        if os.path.exists(file_path):
            os.remove(file_path)

        # Return Full Data Structure
        return JSONResponse(content={
            "visual_data": visual_data,
            "seo_keywords": seo_keywords,
            "listing": listing
        })
    except Exception as e:
        print(f"Error: {e}")
        return JSONResponse(content={"error": str(e)}, status_code=500)

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)
"""
    with open("main.py", "w", encoding="utf-8") as f:
        f.write(content)
    print("✅ main.py restored with full agent logic.")

def update_dashboard():
    """Point the dashboard at a relative URL so it works on the deployed host."""
    try:
        with open("dashboard.html", "r", encoding="utf-8") as f:
            content = f.read()

        # Replace localhost URL with relative path
        new_content = content.replace("http://localhost:8000/generate-catalog", "/generate-catalog")

        with open("dashboard.html", "w", encoding="utf-8") as f:
            f.write(new_content)
        print("✅ dashboard.html updated for cloud deployment.")
    except Exception as e:
        print(f"❌ Error updating dashboard.html: {e}")

def deploy():
    """Stage, commit and push main.py + dashboard.html to the Space remote."""
    print("🚀 Starting Deployment...")
    commands = [
        ["git", "add", "main.py", "dashboard.html"],
        ["git", "commit", "-m", "Restore full brain logic and fix dashboard URL"],
        ["git", "push", "space", "clean_deploy:main"]
    ]

    for cmd in commands:
        try:
            print(f"Running: {' '.join(cmd)}")
            result = subprocess.run(cmd, check=True, capture_output=True, text=True)
            print(result.stdout)
        except subprocess.CalledProcessError as e:
            print(f"❌ Error running command: {' '.join(cmd)}")
            print(e.stderr)
            # Don't break on commit error as it might be empty.
            # NOTE(review): git usually reports "nothing to commit" on stdout,
            # not stderr -- this check may never match; verify before relying on it.
            if "nothing to commit" in e.stderr:
                continue
            # For other errors we might want to continue or stop, but let's try to proceed
    print("✅ Deployment script finished.")

if __name__ == "__main__":
    print("🔧 Restoring Full Brain...")
    restore_main()
    update_dashboard()
    deploy()
train_memory_agent.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import time
from agents.memory_agent import MemoryAgent

def main():
    """Seed the Pinecone index with hand-written sample listings and verify
    them with a test search.

    NOTE(review): relies on MemoryAgent's private `_get_embedding` and its
    `index` attribute -- confirm these remain stable internals of the agent.
    """
    print("Initializing MemoryAgent...")
    # Initialize MemoryAgent (handles Pinecone and Gemini setup)
    agent = MemoryAgent()

    # 1. Define Training Data
    # 5-10 high-quality 'sample listings'
    samples = [
        {
            "id": "sample_summer_tee_001",
            "text": "The ultimate summer essential. This heavyweight cotton t-shirt features a boxy fit and dropped shoulders for a relaxed, contemporary silhouette. Finished with a garment dye process for a soft, broken-in feel and vintage aesthetic.",
            "metadata": {"category": "Streetwear", "best_for": "Summer", "price_point": "Premium"}
        },
        {
            "id": "sample_hoodie_002",
            "text": "Engineered for comfort and durability. This fleece hoodie minimizes shrinkage and maintains its shape over time. Features double-needle stitching, a generous kangaroo pocket, and rib-knit cuffs. The style is versatile enough for gym sessions or casual weekends.",
            "metadata": {"category": "Casual", "best_for": "Autumn/Winter", "price_point": "Mid-range"}
        },
        {
            "id": "sample_joggers_003",
            "text": "Performance meets style. These tapered joggers are crafted from moisture-wicking tech fabric with four-way stretch. Designed with articulated knees for maximum mobility and zippered ankle cuffs for easy on/off. Perfect for high-intensity training or athleisure wear.",
            "metadata": {"category": "Activewear", "best_for": "All Season", "price_point": "Performance"}
        },
        {
            "id": "sample_oversized_shirt_004",
            "text": "A statement piece for the modern wardrobe. This oversized button-down is cut from crisp poplin with a dramatic high-low hem. The minimalist design is accented by hidden placket buttons and a sharp collar, offering a clean, architectural look.",
            "metadata": {"category": "Avant-Garde", "best_for": "Spring/Summer", "price_point": "Luxury"}
        },
        {
            "id": "sample_cargo_shorts_005",
            "text": "Rugged utility for the urban explorer. These cargo shorts are built from ripstop cotton canvas. Featuring multiple bellowed pockets with snap closures, reinforced belt loops, and a gusseted crotch. Functional, durable, and ready for adventure.",
            "metadata": {"category": "Streetwear", "best_for": "Summer", "price_point": "Standard"}
        }
    ]

    print(f"Preparing to upload {len(samples)} samples to Pinecone...")

    # 2. Batch Upload
    vectors = []
    for item in samples:
        # Generate embedding using the agent's internal method
        # This ensures we use the exact same model and parameters as the agent
        embedding = agent._get_embedding(item['text'])

        # Prepare metadata
        # We include the 'text' in metadata so we can retrieve the full description later
        meta = item['metadata'].copy()
        meta['text'] = item['text']

        vectors.append({
            "id": item['id'],
            "values": embedding,
            "metadata": meta
        })

    # Upsert to Pinecone
    # Using the agent's underlying index object
    agent.index.upsert(vectors=vectors)
    print("Upload complete. vectors upserted successfully.")

    # 3. Verify
    print("\n--- Verifying Data ---")
    query = "summer t-shirt"
    print(f"Running test search for: '{query}'")

    # Generate embedding for the query
    query_embedding = agent._get_embedding(query)

    # Query the index
    results = agent.index.query(
        vector=query_embedding,
        top_k=3,
        include_metadata=True
    )

    print(f"Found {len(results.matches)} matches:")
    for match in results.matches:
        print(f"\nID: {match.id}")
        print(f"Score: {match.score:.4f}")
        print(f"Category: {match.metadata.get('category', 'N/A')}")
        print(f"Snippet: {match.metadata.get('text', '')[:100]}...")

if __name__ == "__main__":
    main()
upload_secrets.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import sys
from dotenv import load_dotenv
from huggingface_hub import add_space_secret

# Force UTF-8 output for Windows terminals
sys.stdout.reconfigure(encoding='utf-8')

def upload_secrets():
    """Push the API keys found in the local .env up to the Space as secrets."""
    # Load environment variables from .env file
    load_dotenv()

    space_id = "Bhishaj/MerchFlow-AI"
    keys_to_upload = ["GROQ_API_KEY", "PINECONE_API_KEY", "GOOGLE_API_KEY"]

    print(f"Configuring secrets for Space: {space_id}")

    for secret_name in keys_to_upload:
        secret_value = os.getenv(secret_name)
        if not secret_value:
            print(f"⚠️ Skipping {secret_name} (Not found in .env)")
            continue
        try:
            add_space_secret(repo_id=space_id, key=secret_name, value=secret_value)
            print(f"✅ Uploaded {secret_name}")
        except Exception as e:
            print(f"❌ Failed to upload {secret_name}: {e}")

if __name__ == "__main__":
    upload_secrets()
verify_search.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from agents.memory_agent import MemoryAgent

def check_search():
    """Smoke-test semantic search: embed a fixed query and print the matches."""
    agent = MemoryAgent()
    query = "urban explorer cargo"
    print(f"Searching for '{query}'...")

    hits = agent.index.query(
        vector=agent._get_embedding(query),
        top_k=5,
        include_metadata=True,
    )

    for hit in hits.matches:
        snippet = hit.metadata.get('text', '')[:50]
        print(f"ID: {hit.id} | Score: {hit.score:.4f} | Text snippet: {snippet}")

if __name__ == "__main__":
    check_search()
verify_upload.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
from agents.memory_agent import MemoryAgent

def check():
    """Fetch known sample IDs from the index to confirm the earlier upload."""
    agent = MemoryAgent()
    print("Checking for sample_summer_tee_001...")
    response = agent.index.fetch(ids=["sample_summer_tee_001", "item2"])
    print(response)

if __name__ == "__main__":
    check()