518_yt_monitor / src /utils /cache.py
atoye1's picture
major commit with data files
b9cc1a2
import json
from pathlib import Path
from datetime import datetime
from typing import Optional
from ..models.schemas import VideoAnalysis, ChannelAnalysis
class CacheManager:
    """File-based JSON cache for video and channel analysis results.

    Layout on disk:
        <cache_dir>/videos/<video_id>.json
        <cache_dir>/channels/<channel_id>.json
    """

    def __init__(self, cache_dir: str = "cache"):
        """Set up cache paths and create the directory tree.

        Args:
            cache_dir: Root directory holding the cached JSON files.
        """
        self.cache_dir = Path(cache_dir)
        self.videos_dir = self.cache_dir / "videos"
        self.channels_dir = self.cache_dir / "channels"
        # Create cache directories up front so later reads/writes never
        # fail on a missing parent directory.
        self.videos_dir.mkdir(parents=True, exist_ok=True)
        self.channels_dir.mkdir(parents=True, exist_ok=True)

    def get_video_analysis(self, video_id: str) -> Optional[VideoAnalysis]:
        """Return the cached analysis for ``video_id``, or None on a miss.

        A corrupted or unreadable cache file is treated as a cache miss
        rather than raising, so one bad entry never breaks the caller.
        """
        video_file = self.videos_dir / f"{video_id}.json"
        if not video_file.exists():
            return None
        try:
            with video_file.open('r', encoding='utf-8') as f:
                return VideoAnalysis.from_dict(json.load(f))
        except (OSError, json.JSONDecodeError, KeyError, ValueError):
            # Corrupt/partial cache entry: degrade to a miss.
            return None

    def save_video_analysis(self, analysis: VideoAnalysis) -> None:
        """Persist ``analysis`` as videos/<video_id>.json (overwrites)."""
        video_file = self.videos_dir / f"{analysis.video_id}.json"
        with video_file.open('w', encoding='utf-8') as f:
            json.dump(analysis.to_dict(), f, ensure_ascii=False, indent=2)

    def get_channel_analysis(self, channel_id: str) -> Optional[ChannelAnalysis]:
        """Return the cached channel analysis, or None on a miss.

        Corrupted, unreadable, or schema-incomplete files (missing keys,
        bad ISO timestamp) are treated as cache misses.
        """
        channel_file = self.channels_dir / f"{channel_id}.json"
        if not channel_file.exists():
            return None
        try:
            with channel_file.open('r', encoding='utf-8') as f:
                data = json.load(f)
            return ChannelAnalysis(
                channel_id=data['channel_id'],
                channel_name=data['channel_name'],
                last_analyzed=datetime.fromisoformat(data['last_analyzed']),
                analyzed_videos=data['analyzed_videos'],
            )
        except (OSError, json.JSONDecodeError, KeyError, ValueError):
            # Missing key / malformed timestamp / corrupt JSON: miss.
            return None

    def save_channel_analysis(self, analysis: ChannelAnalysis) -> None:
        """Persist ``analysis`` as channels/<channel_id>.json (overwrites)."""
        channel_file = self.channels_dir / f"{analysis.channel_id}.json"
        with channel_file.open('w', encoding='utf-8') as f:
            json.dump({
                'channel_id': analysis.channel_id,
                'channel_name': analysis.channel_name,
                'last_analyzed': analysis.last_analyzed.isoformat(),
                'analyzed_videos': analysis.analyzed_videos,
            }, f, ensure_ascii=False, indent=2)