// NOTE: the "Spaces: Sleeping" banner that preceded this file was Hugging Face
// Spaces page-scrape residue, not part of the source. Removed so the file parses.
| import { serve } from "bun"; | |
| import { type Repo } from "./types.ts"; | |
// --- Configuration Constants ---

// TCP port the HTTP server listens on (7860 is the conventional Hugging Face Spaces port).
const PORT = 7860;
// How old the cached registry data may get before it is re-fetched.
const REFRESH_INTERVAL_MS = 30 * 60 * 1000; // 30 minutes in milliseconds
// Hard cap on the number of results the search endpoints return.
const MAX_SEARCH_RESULTS = 25;
// Page size for the infinite-scroll endpoints and the default range length.
const DEFAULT_PAGINATION_SIZE = 10;

// Upstream JSON databases, fetched raw from the Zigistry GitHub repository.
// Changed to a single URL for packages.
const PACKAGE_URL =
  "https://raw.githubusercontent.com/Zigistry/database/refs/heads/main/database/packages.json";
const PROGRAMS_URL =
  "https://raw.githubusercontent.com/Zigistry/database/refs/heads/main/database/programs.json";

// --- CORS Headers ---
// Attached to every JSON response; the API is intentionally world-readable.
const CORS_HEADERS: Record<string, string> = {
  "Content-Type": "application/json",
  "Access-Control-Allow-Origin": "*",
};
// --- Data Store ---

// In-memory cache of the upstream registry data plus pre-sorted views.
// Populated by loadData() and refreshed periodically by ensureDataLoaded().
interface DataStore {
  packages: Repo[];
  programs: Repo[];
  // Pre-computed sort orders so index endpoints never sort per request.
  sortedPackages: {
    latest: Repo[];
    mostUsed: Repo[];
  };
  sortedPrograms: {
    latest: Repo[];
    mostUsed: Repo[];
  };
  lastLoaded: number; // Timestamp of last successful data load
}

// Module-level singleton; starts empty (lastLoaded = 0 means "never loaded").
const dataStore: DataStore = {
  packages: [],
  programs: [],
  sortedPackages: { latest: [], mostUsed: [] },
  sortedPrograms: { latest: [], mostUsed: [] },
  lastLoaded: 0,
};
| // --- Helper Functions --- | |
| function removeReadmeContent(repos: Repo[]): Repo[] { | |
| return (repos.map((repo) => { | |
| const { readme_content, ...rest } = repo; // Destructure to exclude readme_content | |
| return rest; | |
| }) as Repo[]); | |
| } | |
| const sortByDate = (a: Repo, b: Repo) => | |
| new Date(b.created_at ?? "").getTime() - | |
| new Date(a.created_at ?? "").getTime(); | |
| const sortByUsage = (a: Repo, b: Repo) => | |
| (b.stargazers_count ?? 0) - (a.stargazers_count ?? 0); | |
| function filterItems( | |
| items: Repo[], | |
| q: string | null, | |
| filter: string | null | |
| ): Repo[] { | |
| const lowerQ = q?.toLowerCase(); | |
| const lowerFilter = filter?.toLowerCase(); | |
| return items.filter( | |
| ({ name, full_name, description, topics, readme_content }) => { | |
| // Apply topic filter first | |
| if ( | |
| lowerFilter && | |
| !topics?.some((t) => t.toLowerCase() === lowerFilter) | |
| ) { | |
| return false; | |
| } | |
| // If no search query, all items passing the filter are included | |
| if (!lowerQ) { | |
| return true; | |
| } | |
| // Check if any relevant field includes the search query | |
| return [name, full_name, description, ...(topics ?? []), readme_content].some( | |
| (field) => field?.toLowerCase().includes(lowerQ) | |
| ); | |
| } | |
| ); | |
| } | |
| function getPaginated<T>(items: T[], page = 0, size = DEFAULT_PAGINATION_SIZE): T[] { | |
| const start = page * size; | |
| return items.slice(start, start + size); | |
| } | |
| function parseRange(str: string | null, max: number): [number, number] { | |
| const match = str?.match(/^(\d+)\.\.(\d+)$/); | |
| let start = 0; | |
| let end = DEFAULT_PAGINATION_SIZE; // Default range if no match | |
| if (match) { | |
| start = parseInt(match[1] ?? "0", 10); | |
| end = parseInt(match[2] ?? String(DEFAULT_PAGINATION_SIZE), 10); | |
| } | |
| return [Math.max(0, start), Math.min(max, end)]; | |
| } | |
| // --- Data Loading and Caching --- | |
| async function fetchData<T>(url: string): Promise<T | null> { | |
| try { | |
| const response = await fetch(url); | |
| if (!response.ok) { | |
| console.error(`Failed to fetch ${url}: ${response.statusText}`); | |
| return null; | |
| } | |
| return (await response.json()) as T; | |
| } catch (error) { | |
| console.error(`Error fetching or parsing ${url}:`, error); | |
| return null; | |
| } | |
| } | |
| async function loadData() { | |
| console.log("Attempting to load data..."); | |
| try { | |
| // Fetch packages (now from a single URL) | |
| const newPackages = await fetchData<Repo[]>(PACKAGE_URL); | |
| // Fetch programs | |
| const newPrograms = await fetchData<Repo[]>(PROGRAMS_URL); | |
| if (newPackages && newPrograms) { | |
| dataStore.packages = newPackages; | |
| dataStore.programs = newPrograms; | |
| // Pre-sort data after successful load | |
| dataStore.sortedPackages.latest = [...newPackages].sort(sortByDate); | |
| dataStore.sortedPackages.mostUsed = [...newPackages].sort(sortByUsage); | |
| dataStore.sortedPrograms.latest = [...newPrograms].sort(sortByDate); | |
| dataStore.sortedPrograms.mostUsed = [...newPrograms].sort(sortByUsage); | |
| dataStore.lastLoaded = Date.now(); | |
| console.log("Data loaded successfully."); | |
| } else { | |
| console.warn("Failed to load all data. Retaining old data if available."); | |
| } | |
| } catch (error) { | |
| console.error("Critical error during data loading:", error); | |
| } | |
| } | |
| async function ensureDataLoaded() { | |
| if (Date.now() - dataStore.lastLoaded > REFRESH_INTERVAL_MS) { | |
| console.log("Data is stale, refreshing..."); | |
| await loadData(); | |
| } else if (dataStore.lastLoaded === 0) { | |
| // Initial load if not already done | |
| console.log("Initial data load..."); | |
| await loadData(); | |
| } | |
| } | |
// Perform initial data load immediately on server start
// (top-level await — serve() below doesn't run until this settles).
await loadData();
// Set up interval for refreshing data. ensureDataLoaded is cheap when data
// is fresh, so polling at half the refresh interval keeps staleness bounded.
setInterval(ensureDataLoaded, REFRESH_INTERVAL_MS / 2); // Check more frequently than refresh interval
| // --- API Route Handlers --- | |
| function handleSearch(items: Repo[], searchParams: URLSearchParams): Response { | |
| const q = searchParams.get("q")?.trim() ?? null; | |
| const filter = searchParams.get("filter")?.trim() ?? null; | |
| const result = filterItems(items, q, filter).slice(0, MAX_SEARCH_RESULTS); | |
| return Response.json(removeReadmeContent(result), { headers: CORS_HEADERS }); | |
| } | |
| function handleInfiniteScroll(items: Repo[], searchParams: URLSearchParams): Response { | |
| const page = parseInt(searchParams.get("pageNumber") || "0", 10); | |
| if (isNaN(page) || page < 0) { | |
| return Response.json( | |
| { error: "Invalid page number. Must be a non-negative integer." }, | |
| { status: 400, headers: CORS_HEADERS } | |
| ); | |
| } | |
| const result = getPaginated(items, page); | |
| return Response.json(removeReadmeContent(result), { headers: CORS_HEADERS }); | |
| } | |
| function handleSingleItem( | |
| items: Repo[], | |
| owner: string, | |
| repo: string, | |
| repoFrom: string | null = null | |
| ): Response { | |
| const found = items.find( | |
| (item) => | |
| item.full_name?.toLowerCase() === `${owner}/${repo}` && | |
| (repoFrom === null || item.repo_from?.toLowerCase() === repoFrom) | |
| ); | |
| if (found) { | |
| // Return the full item, including readme_content, for single item requests | |
| return Response.json(found, { status: 200, headers: CORS_HEADERS }); | |
| } | |
| return Response.json( | |
| { error: "Item not found" }, | |
| { status: 404, headers: CORS_HEADERS } | |
| ); | |
| } | |
| function handleIndexDetails( | |
| sortedData: { latest: Repo[]; mostUsed: Repo[] }, | |
| searchParams: URLSearchParams | |
| ): Response { | |
| const section = searchParams.get("section"); | |
| if (section !== "latestRepos" && section !== "mostUsed") { | |
| return Response.json( | |
| { error: "Invalid section. Must be 'latestRepos' or 'mostUsed'." }, | |
| { status: 400, headers: CORS_HEADERS } | |
| ); | |
| } | |
| const sortKey = section === "latestRepos" ? "latest" : "mostUsed"; | |
| const data = sortedData[sortKey]; | |
| const [start, end] = parseRange(searchParams.get("range"), data.length); | |
| const result = data.slice(start, end); | |
| return Response.json(removeReadmeContent(result), { headers: CORS_HEADERS }); | |
| } | |
// --- Router Mapping ---
// Exact-path routes only. Single-item lookups (/api/packages/... and
// /api/programs/...) are matched by regex inside the server's fetch handler.
const routes: Record<
  string,
  (url: URL) => Promise<Response> | Response
> = {
  "/api/searchPackages": (url) =>
    handleSearch(dataStore.packages, url.searchParams),
  "/api/searchPrograms": (url) =>
    handleSearch(dataStore.programs, url.searchParams),
  "/api/infiniteScrollPackages": (url) =>
    handleInfiniteScroll(dataStore.packages, url.searchParams),
  "/api/infiniteScrollPrograms": (url) =>
    handleInfiniteScroll(dataStore.programs, url.searchParams),
  "/api/indexDetailsPackages": (url) =>
    handleIndexDetails(dataStore.sortedPackages, url.searchParams),
  "/api/indexDetailsPrograms": (url) =>
    handleIndexDetails(dataStore.sortedPrograms, url.searchParams),
};
// --- Server ---
// Request pipeline: CORS preflight → staleness check → exact-path routes →
// regex single-item routes → 404.
serve({
  port: PORT,
  async fetch(req) {
    const url = new URL(req.url);
    // NOTE(review): searchParams is destructured but unused here — the route
    // handlers re-read url.searchParams themselves.
    const { pathname, searchParams } = url;
    // Handle CORS preflight
    if (req.method === "OPTIONS") {
      return new Response(null, {
        status: 204,
        headers: {
          ...CORS_HEADERS,
          "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
          "Access-Control-Allow-Headers": "Content-Type",
        },
      });
    }
    // Ensure data is loaded/refreshed before handling requests
    await ensureDataLoaded();
    // Direct route match
    if (routes[pathname]) {
      return routes[pathname](url);
    }
    // Regex-based routes for single items.
    // Path shape: /api/programs/:repo_from/:owner/:repo
    const programMatch = pathname.match(
      /^\/api\/programs\/([^/]+)\/([^/]+)\/([^/]+)$/
    );
    if (programMatch) {
      // Lowercase all segments; handleSingleItem compares lowercased values.
      const repo_from = programMatch[1]?.toLowerCase() ?? "";
      const owner = programMatch[2]?.toLowerCase() ?? "";
      const repo = programMatch[3]?.toLowerCase() ?? "";
      return handleSingleItem(dataStore.programs, owner, repo, repo_from);
    }
    // Path shape: /api/packages/:repo_from/:owner/:repo
    const packageMatch = pathname.match(
      /^\/api\/packages\/([^/]+)\/([^/]+)\/([^/]+)$/
    );
    if (packageMatch) {
      const repo_from = packageMatch[1]?.toLowerCase() ?? "";
      const owner = packageMatch[2]?.toLowerCase() ?? "";
      const repo = packageMatch[3]?.toLowerCase() ?? "";
      return handleSingleItem(dataStore.packages, owner, repo, repo_from);
    }
    // Not found
    return new Response("Not Found", {
      status: 404,
      headers: CORS_HEADERS,
    });
  },
});

console.log(`Server running on http://localhost:${PORT}`);