FalconNet committed on
Commit
7e7d367
·
verified ·
1 Parent(s): 895145e

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +219 -1
README.md CHANGED
@@ -3,4 +3,222 @@ license: mit
3
  ---
4
 
5
  ## FalconNet/FunPay-Minecraft-Lots-Mini-6k
6
- - Prices are in Rubles
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3
  ---
4
 
5
  ## FalconNet/FunPay-Minecraft-Lots-Mini-6k
6
+ - Prices are in Rubles
7
+
8
+
9
+ ### Script used to create this:
10
+ ```python
11
+ from __future__ import annotations
12
+
13
+ import argparse
14
+ import asyncio
15
+ import csv
16
+ import json
17
+ import re
18
+ from dataclasses import dataclass, asdict
19
+ from pathlib import Path
20
+ from typing import List, Optional
21
+
22
+ from bs4 import BeautifulSoup
23
+ from playwright.async_api import async_playwright, TimeoutError as PlaywrightTimeout
24
+
25
+
26
@dataclass
class Lot:
    """A single offer scraped from the FunPay catalog."""

    lot_id: int    # numeric ID taken from the offer link ("id=..."), 0 if absent
    server: str    # server / sub-category label (may be empty)
    title: str     # offer title or description text
    category: str  # extra category badge text (may be empty)
    seller: str    # seller display name
    reviews: int   # seller review count shown next to the name
    price: float   # price value as rendered on the page
    currency: str  # currency symbol/code as rendered on the page
    link: str      # absolute URL of the offer
39
+
40
+
41
class FunpayParser:
    """High-level API for scraping the public FunPay catalog.

    Pages are rendered with Playwright (the listing is built with JS) and the
    final HTML is parsed with BeautifulSoup into :class:`Lot` records.
    """

    BASE_URL = "https://funpay.com"

    def __init__(
        self,
        language: str = "ru",
        currency: str = "RUB",
        headless: bool = False,
        timeout: int = 10_000,
    ) -> None:
        """
        :param language: Site locale used in URLs and the Accept-Language header.
        :param currency: Currency code pre-set via the ``cy`` cookie so prices
            render consistently.
        :param headless: Run the browser without a visible window.
        :param timeout: Navigation / selector timeout in milliseconds.
        """
        self.language = language
        self.currency = currency
        self.headless = headless
        self.timeout = timeout

    # ─────────────────────────── internal helpers ──────────────────────────── #

    async def _fetch_html(self, url: str) -> str:
        """Render *url* with Playwright and return the final HTML source.

        :raises RuntimeError: if the page or the lot list fails to appear
            within ``self.timeout`` milliseconds.
        """
        async with async_playwright() as p:
            browser = await p.chromium.launch(headless=self.headless)
            try:
                context = await browser.new_context(
                    locale=self.language,
                    extra_http_headers={
                        "Accept-Language": self.language,
                        "User-Agent": (
                            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                            "AppleWebKit/537.36 (KHTML, like Gecko) "
                            "Chrome/124.0.0.0 Safari/537.36"
                        ),
                    },
                )
                # Pre-set currency so the prices are parsed consistently.
                await context.add_cookies(
                    [
                        {
                            "name": "cy",
                            "value": self.currency,
                            "domain": "funpay.com",
                            "path": "/",
                        }
                    ]
                )

                page = await context.new_page()
                try:
                    # Navigate and wait for the full page load, including JS content.
                    await page.goto(url, wait_until="load", timeout=self.timeout)
                    # Explicitly wait for the first lot to appear.  Use the
                    # configured timeout instead of a second hard-coded 10 s
                    # constant (original bug: ignored self.timeout here).
                    await page.wait_for_selector(".tc-item", timeout=self.timeout)
                except PlaywrightTimeout:
                    raise RuntimeError(f"Page load timeout: {url}")

                # Accept cookies banner if present (best-effort).
                try:
                    await page.locator("button:has-text('Agree')").click(timeout=2_000)
                except PlaywrightTimeout:
                    pass  # banner not shown

                return await page.content()
            finally:
                # Close the browser on success *and* on every error path
                # (the original leaked it on non-timeout exceptions).
                await browser.close()

    def _parse_html(self, html: str) -> List[Lot]:
        """Extract structured :class:`Lot` data from rendered HTML."""
        soup = BeautifulSoup(html, "lxml")
        lots: List[Lot] = []

        for anchor in soup.select("a.tc-item"):
            link = self.BASE_URL + anchor.get("href", "")

            # Server / sub-category (can be empty).
            server_el = anchor.select_one(".tc-server")
            server = server_el.get_text(strip=True) if server_el else ""

            # Title / description.
            title_el = anchor.select_one(".tc-desc") or anchor.select_one(".tc-title")
            title = title_el.get_text(strip=True) if title_el else ""

            # Small badge with an additional category mark.
            badge_el = anchor.select_one(".badge")
            category = badge_el.get_text(strip=True) if badge_el else ""

            # Seller block.
            seller_el = anchor.select_one(".media-user-name")
            seller = seller_el.get_text(strip=True) if seller_el else ""

            # Reviews are rendered as "(1234)"; keep digits only so grouped
            # counts like "(1 234)" don't raise ValueError (original bug).
            rev_el = anchor.select_one(".rating-count")
            reviews = 0
            if rev_el:
                rev_digits = re.sub(r"\D", "", rev_el.get_text(strip=True))
                reviews = int(rev_digits) if rev_digits else 0

            # Price block.  RU prices use NBSP/space thousands separators
            # ("1 234,56 ₽"); the original regex failed on them and silently
            # reported 0.0.  Normalize separators before converting.
            price_el = anchor.select_one(".tc-price")
            price_val, price_curr = 0.0, self.currency
            if price_el:
                text = price_el.get_text(strip=True).replace("\u00a0", " ")
                match = re.match(r"([\d\s.,]*\d)\s*([^\d\s]+)", text)
                if match:
                    raw, price_curr = match.groups()
                    raw = raw.replace(" ", "").replace(",", ".")
                    try:
                        price_val = float(raw)
                    except ValueError:
                        price_val = 0.0  # unexpected format — keep lot, zero price

            # Lot ID is available only in the lot link.
            match_id = re.search(r"id=(\d+)", link)
            lot_id = int(match_id.group(1)) if match_id else 0

            lots.append(
                Lot(
                    lot_id=lot_id,
                    server=server,
                    title=title,
                    category=category,
                    seller=seller,
                    reviews=reviews,
                    price=price_val,
                    currency=price_curr,
                    link=link,
                )
            )
        return lots

    async def _collect_lots(self, section_id: int, pages: int) -> List[Lot]:
        """Coroutine that loads *pages* pages for *section_id* sequentially."""
        result: List[Lot] = []
        for idx in range(1, pages + 1):
            url = f"{self.BASE_URL}/{self.language}/lots/{section_id}/?page={idx}"
            html = await self._fetch_html(url)
            page_lots = self._parse_html(html)
            if not page_lots:
                break  # reached last page
            result.extend(page_lots)
        return result

    # ───────────────────────────── public API ──────────────────────────────── #

    def parse(self, section_id: int, pages: int = 1) -> List[Lot]:
        """Synchronously scrape *pages* from a lots section."""
        return asyncio.run(self._collect_lots(section_id, pages))
181
+
182
+
183
+ # I/O helpers remain unchanged
184
+
185
def save_json(lots: List[Lot], path: Path) -> None:
    """Dump *lots* as pretty-printed UTF-8 JSON to *path*."""
    records = [asdict(lot) for lot in lots]
    path.write_text(json.dumps(records, ensure_ascii=False, indent=2), "utf-8")
187
+
188
def save_csv(lots: List[Lot], path: Path) -> None:
    """Write *lots* to *path* as UTF-8 CSV; no file is created for an empty list."""
    if not lots:
        return
    header = list(asdict(lots[0]).keys())
    with path.open("w", newline="", encoding="utf-8") as fh:
        writer = csv.DictWriter(fh, fieldnames=header)
        writer.writeheader()
        writer.writerows(asdict(lot) for lot in lots)
196
+
197
+
198
+ # CLI entry unchanged
199
# CLI entry point.
if __name__ == "__main__":
    def _cli() -> None:
        """Parse command-line arguments, scrape the section, save or print results."""
        parser = argparse.ArgumentParser(description="Scrape FunPay lots into JSON/CSV files")
        parser.add_argument("section", type=int, help="Section numeric ID (e.g. 223 for Minecraft Services)")
        parser.add_argument("--pages", type=int, default=1, help="Number of paginated screens to scan")
        parser.add_argument("--out", type=Path, help="Output file (.json or .csv)")
        parser.add_argument("--headed", action="store_true", help="Run browser in headed mode (for debugging)")

        args = parser.parse_args()

        funpay = FunpayParser(headless=not args.headed)
        lots = funpay.parse(args.section, pages=args.pages)
        print(f"Collected {len(lots)} lots from section #{args.section}")

        if args.out:
            if args.out.suffix == ".json":
                save_json(lots, args.out)
            elif args.out.suffix == ".csv":
                save_csv(lots, args.out)
            else:
                raise SystemExit("Supported output formats: .json / .csv")
            print(f"Saved to {args.out.resolve()}")
        else:
            from pprint import pprint
            pprint(lots[:10])

    # BUG FIX: the original guard defined _cli() but never invoked it, so
    # running the script as a program did nothing.
    _cli()
224
+ ```