diff --git a/.cursor/mcp.json b/.cursor/mcp.json new file mode 100644 index 0000000000000000000000000000000000000000..c2af9f31a096390b204dfcd6e014f22ccc4a91d8 --- /dev/null +++ b/.cursor/mcp.json @@ -0,0 +1,8 @@ +{ + "mcpServers": { + "dev3000": { + "type": "http", + "url": "http://localhost:3684/mcp" + } + } +} diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..d66eef4dc7a14e7b840e7ce10c477468ed3eca75 --- /dev/null +++ b/.env.example @@ -0,0 +1,42 @@ +# Required +FIRECRAWL_API_KEY=your_firecrawl_api_key # Get from https://firecrawl.dev (Web scraping) + +# ================================================================================= +# SANDBOX PROVIDER - Choose Option 1 OR 2 +# ================================================================================= + +# Option 1: Vercel Sandbox (recommended - default) +# Set SANDBOX_PROVIDER=vercel and choose authentication method below +SANDBOX_PROVIDER=vercel + +# Vercel Authentication - Choose method a OR b +# Method a: OIDC Token (recommended for development) +# Run `vercel link` then `vercel env pull` to get VERCEL_OIDC_TOKEN automatically +VERCEL_OIDC_TOKEN=auto_generated_by_vercel_env_pull + +# Method b: Personal Access Token (for production or when OIDC unavailable) +# VERCEL_TEAM_ID=team_xxxxxxxxx # Your Vercel team ID +# VERCEL_PROJECT_ID=prj_xxxxxxxxx # Your Vercel project ID +# VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard + +# Option 2: E2B Sandbox +# Set SANDBOX_PROVIDER=e2b and configure E2B_API_KEY below +# SANDBOX_PROVIDER=e2b +# E2B_API_KEY=your_e2b_api_key # Get from https://e2b.dev + +# ================================================================================= +# AI PROVIDERS - Need at least one +# ================================================================================= + +# Vercel AI Gateway (recommended 
- provides access to multiple models) +AI_GATEWAY_API_KEY=your_ai_gateway_api_key # Get from https://vercel.com/dashboard/ai-gateway/api-keys + +# Individual provider keys (used when AI_GATEWAY_API_KEY is not set) +ANTHROPIC_API_KEY=your_anthropic_api_key # Get from https://console.anthropic.com +OPENAI_API_KEY=your_openai_api_key # Get from https://platform.openai.com (GPT-5) +GEMINI_API_KEY=your_gemini_api_key # Get from https://aistudio.google.com/app/apikey +GROQ_API_KEY=your_groq_api_key # Get from https://console.groq.com (Fast inference - Kimi K2 recommended) + +# Optional Morph Fast Apply +# Get yours at https://morphllm.com/ +MORPH_API_KEY=your_fast_apply_key diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..8439a0898f25054839c4cb418071cedf72f3bc0a --- /dev/null +++ b/.gitignore @@ -0,0 +1,62 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +**/node_modules/ +/.pnp +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/versions + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# env files (can opt-in for committing if needed) +.env* +.env.local +!.env.example + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts + +# E2B template builds +*.tar.gz +e2b-template-* + +# IDE +.vscode/ +.idea/ + +# Temporary files +*.tmp +*.temp +repomix-output.txt +bun.lockb +.env*.local + +# Log files +*.log diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..61ce5370b7cc99b9c2b22f1aa2ed845bc75c0471 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,26 @@ +# Use an official Node.js runtime as a parent image +FROM node:18-slim + +# Set the working directory in the container +WORKDIR /app + +# Copy package.json and pnpm-lock.yaml to 
leverage Docker cache +COPY package.json pnpm-lock.yaml ./ + +# Install pnpm +RUN npm install -g pnpm + +# Install dependencies +RUN pnpm install + +# Copy the rest of the application's code +COPY . . + +# Build the Next.js application +RUN pnpm build + +# Expose the port the app runs on +EXPOSE 3000 + +# Command to run the application +CMD ["pnpm", "start"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5b765c90e0bc6fa718ee6390e49dc3f6ca521f0b --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/PROJECT_README.md b/PROJECT_README.md new file mode 100644 index 0000000000000000000000000000000000000000..b5bbbcdf7592a20bf249a5344689ffe142eab223 --- /dev/null +++ b/PROJECT_README.md @@ -0,0 +1,67 @@ +# Open Lovable + +Chat with AI to build React apps instantly. 
An example app made by the [Firecrawl](https://firecrawl.dev/?ref=open-lovable-github) team. For a complete cloud solution, check out [Lovable.dev](https://lovable.dev/) ❤️. + +Open Lovable Demo + +## Setup + +1. **Clone & Install** +```bash +git clone https://github.com/firecrawl/open-lovable.git +cd open-lovable +pnpm install # or npm install / yarn install +``` + +2. **Add `.env.local`** + +```env +# ================================================================= +# REQUIRED +# ================================================================= +FIRECRAWL_API_KEY=your_firecrawl_api_key # https://firecrawl.dev + +# ================================================================= +# AI PROVIDER - Choose your LLM +# ================================================================= +GEMINI_API_KEY=your_gemini_api_key # https://aistudio.google.com/app/apikey +ANTHROPIC_API_KEY=your_anthropic_api_key # https://console.anthropic.com +OPENAI_API_KEY=your_openai_api_key # https://platform.openai.com +GROQ_API_KEY=your_groq_api_key # https://console.groq.com + +# ================================================================= +# FAST APPLY (Optional - for faster edits) +# ================================================================= +MORPH_API_KEY=your_morphllm_api_key # https://morphllm.com/dashboard + +# ================================================================= +# SANDBOX PROVIDER - Choose ONE: Vercel (default) or E2B +# ================================================================= +SANDBOX_PROVIDER=vercel # or 'e2b' + +# Option 1: Vercel Sandbox (default) +# Choose one authentication method: + +# Method A: OIDC Token (recommended for development) +# Run `vercel link` then `vercel env pull` to get VERCEL_OIDC_TOKEN automatically +VERCEL_OIDC_TOKEN=auto_generated_by_vercel_env_pull + +# Method B: Personal Access Token (for production or when OIDC unavailable) +# VERCEL_TEAM_ID=team_xxxxxxxxx # Your Vercel team ID +# VERCEL_PROJECT_ID=prj_xxxxxxxxx # 
Your Vercel project ID +# VERCEL_TOKEN=vercel_xxxxxxxxxxxx # Personal access token from Vercel dashboard + +# Option 2: E2B Sandbox +# E2B_API_KEY=your_e2b_api_key # https://e2b.dev +``` + +3. **Run** +```bash +pnpm dev # or npm run dev / yarn dev +``` + +Open [http://localhost:3000](http://localhost:3000) + +## License + +MIT \ No newline at end of file diff --git a/README.md b/README.md index 465b74986000dcab4a55bd40f03bda3df504ec97..222582cc1ff3646fc6ec6acd2473a7bd794db934 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ --- -title: Openoperator -emoji: 🐠 -colorFrom: blue -colorTo: gray +# Trigger rebuild +title: Open Lovable +emoji: ❤️ +colorFrom: pink +colorTo: blue sdk: docker +app_port: 3000 pinned: false --- - -Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference diff --git a/app/api/analyze-edit-intent/route.ts b/app/api/analyze-edit-intent/route.ts new file mode 100644 index 0000000000000000000000000000000000000000..07798a03e36e1fbb9f68a93d3547c633e4547d31 --- /dev/null +++ b/app/api/analyze-edit-intent/route.ts @@ -0,0 +1,190 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { createGroq } from '@ai-sdk/groq'; +import { createAnthropic } from '@ai-sdk/anthropic'; +import { createOpenAI } from '@ai-sdk/openai'; +import { createGoogleGenerativeAI } from '@ai-sdk/google'; +import { generateObject } from 'ai'; +import { z } from 'zod'; +// import type { FileManifest } from '@/types/file-manifest'; // Type is used implicitly through manifest parameter + +// Check if we're using Vercel AI Gateway +const isUsingAIGateway = !!process.env.AI_GATEWAY_API_KEY; +const aiGatewayBaseURL = 'https://ai-gateway.vercel.sh/v1'; + +const groq = createGroq({ + apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GROQ_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined, +}); + +const anthropic = createAnthropic({ + apiKey: process.env.AI_GATEWAY_API_KEY ?? 
process.env.ANTHROPIC_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : (process.env.ANTHROPIC_BASE_URL || 'https://api.anthropic.com/v1'), +}); + +const openai = createOpenAI({ + apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.OPENAI_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : process.env.OPENAI_BASE_URL, +}); + +const googleGenerativeAI = createGoogleGenerativeAI({ + apiKey: process.env.AI_GATEWAY_API_KEY ?? process.env.GEMINI_API_KEY, + baseURL: isUsingAIGateway ? aiGatewayBaseURL : undefined, +}); + +// Schema for the AI's search plan - not file selection! +const searchPlanSchema = z.object({ + editType: z.enum([ + 'UPDATE_COMPONENT', + 'ADD_FEATURE', + 'FIX_ISSUE', + 'UPDATE_STYLE', + 'REFACTOR', + 'ADD_DEPENDENCY', + 'REMOVE_ELEMENT' + ]).describe('The type of edit being requested'), + + reasoning: z.string().describe('Explanation of the search strategy'), + + searchTerms: z.array(z.string()).describe('Specific text to search for (case-insensitive). Be VERY specific - exact button text, class names, etc.'), + + regexPatterns: z.array(z.string()).optional().describe('Regex patterns for finding code structures (e.g., "className=[\\"\\\'].*header.*[\\"\\\']")'), + + fileTypesToSearch: z.array(z.string()).default(['.jsx', '.tsx', '.js', '.ts']).describe('File extensions to search'), + + expectedMatches: z.number().min(1).max(10).default(1).describe('Expected number of matches (helps validate search worked)'), + + fallbackSearch: z.object({ + terms: z.array(z.string()), + patterns: z.array(z.string()).optional() + }).optional().describe('Backup search if primary fails') +}); + +export async function POST(request: NextRequest) { + try { + const { prompt, manifest, model = 'openai/gpt-oss-20b' } = await request.json(); + + console.log('[analyze-edit-intent] Request received'); + console.log('[analyze-edit-intent] Prompt:', prompt); + console.log('[analyze-edit-intent] Model:', model); + console.log('[analyze-edit-intent] Manifest files 
count:', manifest?.files ? Object.keys(manifest.files).length : 0); + + if (!prompt || !manifest) { + return NextResponse.json({ + error: 'prompt and manifest are required' + }, { status: 400 }); + } + + // Create a summary of available files for the AI + const validFiles = Object.entries(manifest.files as Record) + .filter(([path]) => { + // Filter out invalid paths + return path.includes('.') && !path.match(/\/\d+$/); + }); + + const fileSummary = validFiles + .map(([path, info]: [string, any]) => { + const componentName = info.componentInfo?.name || path.split('/').pop(); + // const hasImports = info.imports?.length > 0; // Kept for future use + const childComponents = info.componentInfo?.childComponents?.join(', ') || 'none'; + return `- ${path} (${componentName}, renders: ${childComponents})`; + }) + .join('\n'); + + console.log('[analyze-edit-intent] Valid files found:', validFiles.length); + + if (validFiles.length === 0) { + console.error('[analyze-edit-intent] No valid files found in manifest'); + return NextResponse.json({ + success: false, + error: 'No valid files found in manifest' + }, { status: 400 }); + } + + console.log('[analyze-edit-intent] Analyzing prompt:', prompt); + console.log('[analyze-edit-intent] File summary preview:', fileSummary.split('\n').slice(0, 5).join('\n')); + + // Select the appropriate AI model based on the request + let aiModel; + if (model.startsWith('anthropic/')) { + aiModel = anthropic(model.replace('anthropic/', '')); + } else if (model.startsWith('openai/')) { + if (model.includes('gpt-oss')) { + aiModel = groq(model); + } else { + aiModel = openai(model.replace('openai/', '')); + } + } else if (model.startsWith('google/')) { + aiModel = googleGenerativeAI(model.replace('google/', '')); + } else { + // Default to groq if model format is unclear + aiModel = groq(model); + } + + console.log('[analyze-edit-intent] Using AI model:', model); + + // Use AI to create a search plan + const result = await generateObject({ + 
model: aiModel, + schema: searchPlanSchema, + messages: [ + { + role: 'system', + content: `You are an expert at planning code searches. Your job is to create a search strategy to find the exact code that needs to be edited. + +DO NOT GUESS which files to edit. Instead, provide specific search terms that will locate the code. + +SEARCH STRATEGY RULES: +1. For text changes (e.g., "change 'Start Deploying' to 'Go Now'"): + - Search for the EXACT text: "Start Deploying" + +2. For style changes (e.g., "make header black"): + - Search for component names: "Header", "; + var sandboxState: SandboxState; +} + +interface ParsedResponse { + explanation: string; + template: string; + files: Array<{ path: string; content: string }>; + packages: string[]; + commands: string[]; + structure: string | null; +} + +function parseAIResponse(response: string): ParsedResponse { + const sections = { + files: [] as Array<{ path: string; content: string }>, + commands: [] as string[], + packages: [] as string[], + structure: null as string | null, + explanation: '', + template: '' + }; + + // Function to extract packages from import statements + function extractPackagesFromCode(content: string): string[] { + const packages: string[] = []; + // Match ES6 imports + const importRegex = /import\s+(?:(?:\{[^}]*\}|\*\s+as\s+\w+|\w+)(?:\s*,\s*(?:\{[^}]*\}|\*\s+as\s+\w+|\w+))*\s+from\s+)?['"]([^'"]+)['"]/g; + let importMatch; + + while ((importMatch = importRegex.exec(content)) !== null) { + const importPath = importMatch[1]; + // Skip relative imports and built-in React + if (!importPath.startsWith('.') && !importPath.startsWith('/') && + importPath !== 'react' && importPath !== 'react-dom' && + !importPath.startsWith('@/')) { + // Extract package name (handle scoped packages like @heroicons/react) + const packageName = importPath.startsWith('@') + ? 
importPath.split('/').slice(0, 2).join('/') + : importPath.split('/')[0]; + + if (!packages.includes(packageName)) { + packages.push(packageName); + + // Log important packages for debugging + if (packageName === 'react-router-dom' || packageName.includes('router') || packageName.includes('icon')) { + console.log(`[apply-ai-code-stream] Detected package from imports: ${packageName}`); + } + } + } + } + + return packages; + } + + // Parse file sections - handle duplicates and prefer complete versions + const fileMap = new Map(); + + // First pass: Find all file declarations + const fileRegex = /([\s\S]*?)(?:<\/file>|$)/g; + let match; + while ((match = fileRegex.exec(response)) !== null) { + const filePath = match[1]; + const content = match[2].trim(); + const hasClosingTag = response.substring(match.index, match.index + match[0].length).includes(''); + + // Check if this file already exists in our map + const existing = fileMap.get(filePath); + + // Decide whether to keep this version + let shouldReplace = false; + if (!existing) { + shouldReplace = true; // First occurrence + } else if (!existing.isComplete && hasClosingTag) { + shouldReplace = true; // Replace incomplete with complete + console.log(`[apply-ai-code-stream] Replacing incomplete ${filePath} with complete version`); + } else if (existing.isComplete && hasClosingTag && content.length > existing.content.length) { + shouldReplace = true; // Replace with longer complete version + console.log(`[apply-ai-code-stream] Replacing ${filePath} with longer complete version`); + } else if (!existing.isComplete && !hasClosingTag && content.length > existing.content.length) { + shouldReplace = true; // Both incomplete, keep longer one + } + + if (shouldReplace) { + // Additional validation: reject obviously broken content + if (content.includes('...') && !content.includes('...props') && !content.includes('...rest')) { + console.warn(`[apply-ai-code-stream] Warning: ${filePath} contains ellipsis, may be truncated`); 
+ // Still use it if it's the only version we have + if (!existing) { + fileMap.set(filePath, { content, isComplete: hasClosingTag }); + } + } else { + fileMap.set(filePath, { content, isComplete: hasClosingTag }); + } + } + } + + // Convert map to array for sections.files + for (const [path, { content, isComplete }] of fileMap.entries()) { + if (!isComplete) { + console.log(`[apply-ai-code-stream] Warning: File ${path} appears to be truncated (no closing tag)`); + } + + sections.files.push({ + path, + content + }); + + // Extract packages from file content + const filePackages = extractPackagesFromCode(content); + for (const pkg of filePackages) { + if (!sections.packages.includes(pkg)) { + sections.packages.push(pkg); + console.log(`[apply-ai-code-stream] 📦 Package detected from imports: ${pkg}`); + } + } + } + + // Also parse markdown code blocks with file paths + const markdownFileRegex = /```(?:file )?path="([^"]+)"\n([\s\S]*?)```/g; + while ((match = markdownFileRegex.exec(response)) !== null) { + const filePath = match[1]; + const content = match[2].trim(); + sections.files.push({ + path: filePath, + content: content + }); + + // Extract packages from file content + const filePackages = extractPackagesFromCode(content); + for (const pkg of filePackages) { + if (!sections.packages.includes(pkg)) { + sections.packages.push(pkg); + console.log(`[apply-ai-code-stream] 📦 Package detected from imports: ${pkg}`); + } + } + } + + // Parse plain text format like "Generated Files: Header.jsx, index.css" + const generatedFilesMatch = response.match(/Generated Files?:\s*([^\n]+)/i); + if (generatedFilesMatch) { + // Split by comma first, then trim whitespace, to preserve filenames with dots + const filesList = generatedFilesMatch[1] + .split(',') + .map(f => f.trim()) + .filter(f => f.endsWith('.jsx') || f.endsWith('.js') || f.endsWith('.tsx') || f.endsWith('.ts') || f.endsWith('.css') || f.endsWith('.json') || f.endsWith('.html')); + console.log(`[apply-ai-code-stream] 
Detected generated files from plain text: ${filesList.join(', ')}`); + + // Try to extract the actual file content if it follows + for (const fileName of filesList) { + // Look for the file content after the file name + const fileContentRegex = new RegExp(`${fileName}[\\s\\S]*?(?:import[\\s\\S]+?)(?=Generated Files:|Applying code|$)`, 'i'); + const fileContentMatch = response.match(fileContentRegex); + if (fileContentMatch) { + // Extract just the code part (starting from import statements) + const codeMatch = fileContentMatch[0].match(/^(import[\s\S]+)$/m); + if (codeMatch) { + const filePath = fileName.includes('/') ? fileName : `src/components/${fileName}`; + sections.files.push({ + path: filePath, + content: codeMatch[1].trim() + }); + console.log(`[apply-ai-code-stream] Extracted content for ${filePath}`); + + // Extract packages from this file + const filePackages = extractPackagesFromCode(codeMatch[1]); + for (const pkg of filePackages) { + if (!sections.packages.includes(pkg)) { + sections.packages.push(pkg); + console.log(`[apply-ai-code-stream] Package detected from imports: ${pkg}`); + } + } + } + } + } + } + + // Also try to parse if the response contains raw JSX/JS code blocks + const codeBlockRegex = /```(?:jsx?|tsx?|javascript|typescript)?\n([\s\S]*?)```/g; + while ((match = codeBlockRegex.exec(response)) !== null) { + const content = match[1].trim(); + // Try to detect the file name from comments or context + const fileNameMatch = content.match(/\/\/\s*(?:File:|Component:)\s*([^\n]+)/); + if (fileNameMatch) { + const fileName = fileNameMatch[1].trim(); + const filePath = fileName.includes('/') ? 
fileName : `src/components/${fileName}`; + + // Don't add duplicate files + if (!sections.files.some(f => f.path === filePath)) { + sections.files.push({ + path: filePath, + content: content + }); + + // Extract packages + const filePackages = extractPackagesFromCode(content); + for (const pkg of filePackages) { + if (!sections.packages.includes(pkg)) { + sections.packages.push(pkg); + } + } + } + } + } + + // Parse commands + const cmdRegex = /(.*?)<\/command>/g; + while ((match = cmdRegex.exec(response)) !== null) { + sections.commands.push(match[1].trim()); + } + + // Parse packages - support both and tags + const pkgRegex = /(.*?)<\/package>/g; + while ((match = pkgRegex.exec(response)) !== null) { + sections.packages.push(match[1].trim()); + } + + // Also parse tag with multiple packages + const packagesRegex = /([\s\S]*?)<\/packages>/; + const packagesMatch = response.match(packagesRegex); + if (packagesMatch) { + const packagesContent = packagesMatch[1].trim(); + // Split by newlines or commas + const packagesList = packagesContent.split(/[\n,]+/) + .map(pkg => pkg.trim()) + .filter(pkg => pkg.length > 0); + sections.packages.push(...packagesList); + } + + // Parse structure + const structureMatch = /([\s\S]*?)<\/structure>/; + const structResult = response.match(structureMatch); + if (structResult) { + sections.structure = structResult[1].trim(); + } + + // Parse explanation + const explanationMatch = /([\s\S]*?)<\/explanation>/; + const explResult = response.match(explanationMatch); + if (explResult) { + sections.explanation = explResult[1].trim(); + } + + // Parse template + const templateMatch = /