File size: 6,569 Bytes
092e58d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
import logging
from types import SimpleNamespace

from flask import Blueprint, render_template, request, jsonify, current_app
from flask_login import login_required, current_user

from app import db, celery
from app.models.google_ad import GoogleAd
from app.services.ai_processor import AIPipeline
from app.services.google_scraper import GoogleAdsScraper

logger = logging.getLogger(__name__)
google_ads_bp = Blueprint('google_ads', __name__, url_prefix='/google-ads')

@google_ads_bp.route('/', methods=['GET'])
@login_required
def index():
    """Render the Google Ads dashboard landing page."""
    return render_template('google_ads/index.html')

@google_ads_bp.route('/search', methods=['GET', 'POST'])
@login_required
def search_ads():
    """Search for Google Ads.

    GET renders the search form. POST validates the form input and
    launches an asynchronous scraping task, returning a JSON payload
    with the Celery task id.
    """
    if request.method == 'POST':
        search_query = (request.form.get('query') or '').strip()
        if not search_query:
            # Reject empty/missing queries instead of launching a task
            # that would scrape nothing.
            return jsonify({
                'status': 'error',
                'message': 'A search query is required'
            }), 400

        try:
            num_pages = int(request.form.get('num_pages', 3))
        except (TypeError, ValueError):
            # Malformed num_pages previously raised and produced a 500;
            # fall back to the historical default instead.
            num_pages = 3

        # Start async task for scraping
        task = scrape_google_search_ads.delay(search_query, num_pages, current_user.id)

        return jsonify({
            'status': 'success',
            'message': 'Google Ads scraping started',
            'task_id': task.id
        })

    # GET request - show search form
    return render_template('google_ads/search.html')

@google_ads_bp.route('/display', methods=['GET', 'POST'])
@login_required
def display_ads():
    """Scrape display ads from a URL.

    GET renders the form. POST validates the input and launches an
    asynchronous display-ad scraping task, returning its task id.
    """
    if request.method == 'POST':
        target_url = (request.form.get('url') or '').strip()
        if not target_url:
            # Reject missing URLs rather than dispatching a task that
            # cannot scrape anything.
            return jsonify({
                'status': 'error',
                'message': 'A target URL is required'
            }), 400

        try:
            scroll_count = int(request.form.get('scroll_count', 5))
        except (TypeError, ValueError):
            # Malformed scroll_count previously raised and produced a
            # 500; fall back to the historical default instead.
            scroll_count = 5

        # Start async task for scraping
        task = scrape_google_display_ads.delay(target_url, scroll_count, current_user.id)

        return jsonify({
            'status': 'success',
            'message': 'Google Display Ads scraping started',
            'task_id': task.id
        })

    # GET request - show form
    return render_template('google_ads/display.html')

@google_ads_bp.route('/results', methods=['GET'])
@login_required
def view_results():
    """Render the Google Ads results page, optionally filtered.

    Supports a `type` query parameter (`all` disables the filter) and a
    free-text `query` matched against title, description and the
    original search query.
    """
    ad_type = request.args.get('type', 'all')
    query = request.args.get('query', '')

    ads_query = GoogleAd.query

    if ad_type != 'all':
        ads_query = ads_query.filter(GoogleAd.ad_type == ad_type)

    if query:
        # Case-insensitive substring match across the text columns.
        pattern = f'%{query}%'
        ads_query = ads_query.filter(
            GoogleAd.title.ilike(pattern)
            | GoogleAd.description.ilike(pattern)
            | GoogleAd.search_query.ilike(pattern)
        )

    ads = ads_query.order_by(GoogleAd.created_at.desc()).all()

    return render_template('google_ads/results.html', ads=ads, ad_type=ad_type, query=query)

@google_ads_bp.route('/api/ads', methods=['GET'])
@login_required
def api_get_ads():
    """API endpoint to get Google Ads data as JSON.

    Query parameters:
        type:  ad type to filter by, or 'all' (default) for no filter.
        query: substring matched (case-insensitively) against title,
               description and search_query.
        limit: maximum number of rows to return (default 50).
    """
    ad_type = request.args.get('type', 'all')
    query = request.args.get('query', '')
    try:
        limit = int(request.args.get('limit', 50))
    except (TypeError, ValueError):
        # Malformed limit previously raised and produced a 500; fall
        # back to the default instead.
        limit = 50

    # Build query
    ads_query = GoogleAd.query

    if ad_type != 'all':
        ads_query = ads_query.filter(GoogleAd.ad_type == ad_type)

    if query:
        pattern = f'%{query}%'
        ads_query = ads_query.filter(
            GoogleAd.title.ilike(pattern)
            | GoogleAd.description.ilike(pattern)
            | GoogleAd.search_query.ilike(pattern)
        )

    # Get results, newest first.
    ads = ads_query.order_by(GoogleAd.created_at.desc()).limit(limit).all()

    # Serialize each row to a plain dict for jsonify.
    result = [
        {
            'id': ad.id,
            'ad_type': ad.ad_type,
            'title': ad.title,
            'description': ad.description,
            'display_url': ad.display_url,
            'target_url': ad.target_url,
            'image_url': ad.image_url,
            'position': ad.position,
            'search_query': ad.search_query,
            'page_url': ad.page_url,
            'sentiment': ad.sentiment,
            'created_at': ad.created_at.isoformat() if ad.created_at else None,
        }
        for ad in ads
    ]

    return jsonify(result)

@celery.task
def scrape_google_search_ads(search_query, num_pages, user_id):
    """Celery task to scrape Google search ads.

    Scrapes ads for *search_query* across *num_pages* result pages,
    optionally enriches each ad with AI sentiment, and persists them
    for *user_id*. Returns a status dict with the scraped count, or an
    error payload if scraping or persistence fails.
    """
    try:
        scraper = GoogleAdsScraper()
        ads_data = scraper.scrape_search_ads(search_query, num_pages)

        # Process and store ads
        ai_pipeline = AIPipeline()

        for ad_data in ads_data:
            # Create GoogleAd instance
            ad = GoogleAd.from_search_ad_data(ad_data, search_query, user_id)

            # Process with AI if there's content
            if ad.title or ad.description:
                try:
                    # Join only the non-empty parts so a missing title or
                    # description does not inject the literal string "None"
                    # into the AI input. SimpleNamespace replaces the
                    # previous ad-hoc `type(...)` class hack: process_ad
                    # only needs an object with a `.content` attribute.
                    text = ' '.join(
                        part for part in (ad.title, ad.description) if part
                    )
                    ai_results = ai_pipeline.process_ad(SimpleNamespace(content=text))
                    ad.sentiment = ai_results.get('sentiment')
                except Exception as e:
                    # Best-effort enrichment: a failed AI call must not
                    # abort the whole scrape.
                    logger.error(f"Error processing ad with AI: {e}")

            # Save to database
            db.session.add(ad)

        db.session.commit()
        return {'status': 'success', 'count': len(ads_data)}

    except Exception as e:
        logger.error(f"Error in Google search ads scraping task: {e}")
        db.session.rollback()
        return {'status': 'error', 'message': str(e)}

@celery.task
def scrape_google_display_ads(target_url, scroll_count, user_id):
    """Celery task to scrape Google display ads.

    Scrapes display ads rendered at *target_url* (scrolling
    *scroll_count* times) and persists one GoogleAd row per ad for
    *user_id*. Returns a status dict with the scraped count, or an
    error payload if scraping or persistence fails.
    """
    try:
        scraper = GoogleAdsScraper()
        scraped = scraper.scrape_display_ads(target_url, scroll_count)

        # Stage one GoogleAd row per scraped ad, then commit in one go.
        for item in scraped:
            db.session.add(GoogleAd.from_display_ad_data(item, user_id))

        db.session.commit()
        return {'status': 'success', 'count': len(scraped)}

    except Exception as e:
        logger.error(f"Error in Google display ads scraping task: {e}")
        db.session.rollback()
        return {'status': 'error', 'message': str(e)}