| | |
| | import os |
| | import sys |
| | import json |
| | import time |
| | import asyncio |
| | import logging |
| | from pathlib import Path |
| | from tqdm.asyncio import tqdm |
| | from argparse import ArgumentParser |
| | from playwright.async_api import async_playwright |
| |
|
# ---------------------------------------------------------------------------
# Command-line configuration.
# Set cmd_args = False to run with pure defaults (handy in notebooks/REPL,
# where sys.argv belongs to the host process).
# ---------------------------------------------------------------------------
cmd_args = True
parser = ArgumentParser()
parser.add_argument('--test_dir', default='./test_webpages', help='the directory of test webpages.')
parser.add_argument('--inference_dir', default='./inference', help='the directory of model output webpages.')
parser.add_argument('--save_dir', default='./save_results', help='the directory for saving result info jsonl file.')
parser.add_argument('--model_name', default="Qwen2.5-VL-32B-Instruct", help='using the vlms for your inference')
# BUG FIX: type=int — without it a CLI-supplied value arrives as a str and
# breaks `len(cache) >= NUM_WORKERS` and `range(NUM_WORKERS)` downstream.
parser.add_argument('--num_workers', type=int, default=50, help='num workers for computing')
# BUG FIX: help text was a copy-paste of the num_workers help.
parser.add_argument('--log_dir', default='./', help='the directory for the evaluation log file.')
if not cmd_args:
    args = parser.parse_args([])
else:
    args = parser.parse_args()

# Derived paths used throughout the script.
MODEL_NAME = os.path.basename(args.model_name)
LOG_PATH = os.path.join(args.log_dir, f'get_evaluation_{MODEL_NAME}.log')
INFERENCE_DIR = args.inference_dir
ORI_DIR = args.test_dir
# BUG FIX: original was os.path.join(args.save_dir, {MODEL_NAME}.jsonl) —
# a set literal followed by an attribute access, which raises at runtime.
SAVE_PATH = os.path.join(args.save_dir, f'{MODEL_NAME}.jsonl')
| |
|
| | |
# Helper scripts injected into every rendered page: Vue runtime, the
# one-color color library, and the project's similarity-scoring code.
vue_code = Path("./scripts/js/vue.global.js").read_text(encoding="utf-8")
one_color_code = Path("./scripts/js/one-color-all.js").read_text(encoding="utf-8")
codesim_code = Path("./scripts/js/codeSim.js").read_text(encoding="utf-8")
| |
|
| | |
# Coerce defensively: argparse without type=int delivers CLI values as str,
# which would make `len(cache) >= NUM_WORKERS` raise a TypeError.
NUM_WORKERS = int(args.num_workers)
| |
|
| |
|
def setup_logger(is_console_handler=True):
    """Create and configure the shared 'web_scraping' logger.

    Records are always appended to the file at LOG_PATH; when
    *is_console_handler* is True they are echoed to stdout as well.

    Args:
        is_console_handler: attach a stdout StreamHandler when True.

    Returns:
        The configured logging.Logger instance.
    """
    logger = logging.getLogger('web_scraping')
    logger.setLevel(logging.INFO)

    # BUG FIX: if this function is called more than once in a process the
    # original appended fresh handlers each time, duplicating every log line.
    if logger.handlers:
        return logger

    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s',
                                  datefmt='%Y-%m-%d %H:%M:%S')

    file_handler = logging.FileHandler(LOG_PATH)
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    if is_console_handler:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(logging.INFO)
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)

    return logger
| |
|
| |
|
# In-memory buffer of result rows awaiting a batched append to SAVE_PATH.
cache = []
# Serializes flushes of `cache` across concurrently running workers.
file_lock = asyncio.Lock()
| |
|
| |
|
async def _flush_cache():
    """Append every buffered row to SAVE_PATH as JSON lines, then empty the
    buffer. Serialized with file_lock so concurrent workers don't interleave
    writes or double-flush."""
    async with file_lock:
        with open(SAVE_PATH, 'a') as f:
            for c in cache:
                f.write(json.dumps(c) + '\n')
        cache.clear()


async def save_result(name, layoutSim, styleSim, force=False):
    """Buffer one similarity result; flush to disk in batches.

    Args:
        name: identifier of the webpage being scored.
        layoutSim: dict with 'groupLayoutScore', 'overallScore' and
            'relativeLayoutScore' keys (as produced by getLayoutSim).
        styleSim: dict with a 'relativeStyleScore' key.
        force: when True, ignore the other arguments and flush whatever is
            buffered (used once at shutdown). Returns 0 in that case.
    """
    if force:
        await _flush_cache()
        return 0
    cache.append({
        'name': name,
        'groupLayoutScore': layoutSim['groupLayoutScore'],
        'overallScore': layoutSim['overallScore'],
        'relativeLayoutScore': layoutSim['relativeLayoutScore'],
        'relativeStyleScore': styleSim['relativeStyleScore']
    })
    # Amortize disk writes: only flush once a full batch has accumulated.
    if len(cache) >= NUM_WORKERS:
        await _flush_cache()
| |
|
| |
|
async def worker(worker_id, queue, browser, logger, pbar):
    """Queue consumer: score one model-generated page against its reference.

    For each filename pulled from *queue*, renders the generated page
    (phase 1) to extract its element tree, then renders the reference page
    (phase 2) to compute layout/style similarity, persisting scores via
    save_result(). The progress bar is advanced exactly once per item.

    Args:
        worker_id: label for this worker (identification only; the original
            parameter was immediately shadowed by the page name).
        queue: asyncio.Queue of HTML filenames from INFERENCE_DIR.
        browser: Playwright browser used to open pages.
        logger: destination for progress/error messages.
        pbar: tqdm progress bar shared across workers.
    """
    while not queue.empty():
        html_item = await queue.get()
        name = os.path.splitext(html_item)[0]
        source_html_path = os.path.join(INFERENCE_DIR, f"{name}.html")
        target_html_path = os.path.join(ORI_DIR, name, "index.html")
        # Skip items whose files are missing on either side.
        if not os.path.exists(source_html_path):
            logger.error(f"❌ File not Exists: {source_html_path}")
            queue.task_done()
            pbar.update(1)
            continue
        if not os.path.exists(target_html_path):
            logger.error(f"❌ File not Exists: {target_html_path}")
            queue.task_done()
            pbar.update(1)
            continue
        logger.info(
            f"⭐ push {source_html_path}")
        file_url = Path(source_html_path).as_uri()

        # --- Phase 1: render the generated page, extract its elements. ---
        start_time = time.perf_counter()
        # BUG FIX: initialize so a phase-1 failure no longer surfaces as a
        # NameError inside phase 2 (masquerading as a target failure).
        sources = None
        page = await browser.new_page(viewport={'width': 1920, 'height': 1080})
        try:
            try:
                await page.goto(
                    file_url,
                    timeout=10000,
                    wait_until='domcontentloaded'
                )
            except Exception as e:
                # Navigation errors are tolerated: the scripts may still run
                # against whatever portion of the page did load.
                logger.error(f"⚠ Loading: {file_url}, Error: {e}")
            await page.add_script_tag(content=vue_code)
            await page.add_script_tag(content=one_color_code)
            await page.add_script_tag(content=codesim_code)
            sources = await page.evaluate("() => getElements()")
            elapsed = time.perf_counter() - start_time
            logger.info(
                f"✅ Source computed complete {source_html_path}, element count: {len(sources)} time: {elapsed:.2f}")
        except Exception as e:
            logger.error(f"❌ Source computed failed: {source_html_path}, Error: {e}")
        finally:
            await page.close()

        if sources is None:
            # Phase 1 failed; there is nothing to compare against.
            queue.task_done()
            pbar.update(1)
            continue

        # --- Phase 2: render the reference page, compute similarity. ---
        file_url = Path(target_html_path).as_uri()
        sec_start_time = time.perf_counter()
        page = await browser.new_page(viewport={'width': 1920, 'height': 1080})
        try:
            try:
                await page.goto(
                    file_url,
                    timeout=3000,
                    wait_until='domcontentloaded'
                )
            except Exception as e:
                logger.error(f"⚠ Loading: {file_url}, Error: {e}")
            await page.add_script_tag(content=vue_code)
            await page.add_script_tag(content=one_color_code)
            await page.add_script_tag(content=codesim_code)
            await page.evaluate("() => targetEls.value = getElements(false)")
            layoutSim = await page.evaluate("(sources) => getLayoutSim(sources)", sources)
            styleSim = await page.evaluate("(sources) => getStyleSim(sources)", sources)
            await save_result(name, layoutSim, styleSim)
            elapsed_total = time.perf_counter() - start_time
            elapsed = time.perf_counter() - sec_start_time
            logger.info(
                f"✅ Target computed complete {target_html_path}, layoutSim: {layoutSim['overallScore']}, styleSim: {styleSim['relativeStyleScore']}, element_count: {len(sources)} time: {elapsed:.2f} total_time: {elapsed_total:.2f}")
        except Exception as e:
            logger.error(f"❌ Target computed failed: {target_html_path}, Error: {e}")
        finally:
            await page.close()
        queue.task_done()
        pbar.update(1)
| |
|
| |
|
async def main():
    """Entry point: resume from prior results, then fan NUM_WORKERS
    browser-backed workers out over the remaining inference pages and
    flush any buffered scores on completion."""
    logger = setup_logger(False)
    source_list = os.listdir(INFERENCE_DIR)

    len_total = len(source_list)
    print('Total: ', len_total)
    # Resume support: skip pages already scored in SAVE_PATH.
    exists_meta = []
    if os.path.exists(SAVE_PATH):
        with open(SAVE_PATH) as f:
            exists_data = f.readlines()
        exists_data = [json.loads(item) for item in exists_data]
        for meta_info in exists_data:
            name = meta_info.get("name", 'none')
            exists_meta.append(name + '.html')

    exists_meta = set(exists_meta)
    source_list = [h for h in source_list if h not in exists_meta]
    print(
        f"Found: {len(exists_meta)} Matched: {len_total - len(source_list)} Remain: {len(source_list)}")

    queue = asyncio.Queue()
    for html_item in source_list:
        await queue.put(html_item)

    async with async_playwright() as p:
        # One browser per worker, so a crashed renderer only stalls one lane.
        browser_list = []
        for i in range(NUM_WORKERS):
            browser = await p.chromium.launch(headless=True, args=['--no-sandbox', '--disable-setuid-sandbox'])
            browser_list.append(browser)
        print('Browser Started')
        with tqdm(total=len(source_list), desc="Progress") as pbar:
            tasks = []
            for i in range(NUM_WORKERS):
                browser = browser_list[i % len(browser_list)]
                tasks.append(asyncio.create_task(
                    worker(f"worker-{i}", queue, browser, logger, pbar)))
            await queue.join()
            # Workers loop on queue.empty(); cancel any still blocked.
            for t in tasks:
                t.cancel()
        for browser in browser_list:
            await browser.close()
    # BUG FIX: the original bare call `save_result(None, None, None, True)`
    # only created a coroutine object that was never awaited, so the final
    # partial batch in `cache` was silently never written to disk.
    await save_result(None, None, None, force=True)
| |
|
# Script entry point: drive the whole async evaluation pipeline.
if __name__ == "__main__":
    asyncio.run(main())
| |
|
| | |
| |
|