from src.crawler import DatabaseUpdater
from src.utils import logger


class DatabaseService:
    """Async facade that refreshes the database from scraped URLs and/or the knowledge base.

    Usable as an async context manager; it holds no resources itself —
    each update operation opens its own ``DatabaseUpdater`` context.
    """

    def __init__(self):
        # Stored as a factory (the class itself), instantiated per operation.
        self.database_updater = DatabaseUpdater
        # NOTE(review): index mapping assumed from the slices used below —
        # [0] sifars.md, [1] knowledge_base.txt, [2] scrapped.txt; confirm
        # against DatabaseUpdater, the pairing of slice-to-branch looks odd.
        self.file_paths = ["sifars.md", "knowledge_base.txt", "scrapped.txt"]

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Nothing to release; DatabaseUpdater contexts clean up themselves.
        pass

    async def _update_database(self, urls: list[str], knowledge_base: bool) -> str:
        """Refresh the database from *urls* and/or the knowledge base.

        Args:
            urls: URLs to scrape; skipped when empty.
            knowledge_base: when truthy, re-extract the knowledge base.

        Returns:
            A short status string: "Nothing to update" when both inputs are
            empty/falsy, otherwise "Database updated".
        """
        if not urls and not knowledge_base:
            return "Nothing to update"

        if urls:
            async with self.database_updater() as scrape_updater:
                await scrape_updater._clear_old_files(file_paths=self.file_paths[1:2])
                for url in urls:
                    await scrape_updater._extract_scraped_data(url=url)

        if knowledge_base:
            async with self.database_updater() as kb_updater:
                await kb_updater._clear_old_files(file_paths=self.file_paths[:1])
                await kb_updater._extract_knowledge_base()

        # BUG FIX: the final rebuild previously reused a `database_updater`
        # bound inside one of the branches above, after its async context had
        # already exited (use-after-__aexit__). Open a fresh context so the
        # rebuild always runs against a live updater.
        async with self.database_updater() as rebuild_updater:
            await rebuild_updater._delete_old_database()
            await rebuild_updater._update_database_from_file(file_paths=self.file_paths)

        return "Database updated"