import logging
import os
import sqlite3
import time
from datetime import datetime
from typing import Any, Dict, List, Optional

import requests
import urllib3
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

# The session below is created with verify=False, so suppress the
# per-request InsecureRequestWarning noise from urllib3.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
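
# Overview: the crawler reads seed users from an input SQLite database,
# fetches each seed's following list from the TweetScout /follows endpoint,
# keeps only followed accounts with more than 5000 followers, and writes the
# profiles and follow edges to an output database. A separate progress
# database records which seeds are done so interrupted runs can resume.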


class FollowingCrawlerV5:
    """Expand a set of seed Twitter users by crawling who they follow via TweetScout."""

    def __init__(self, api_key: str, input_db: str, output_db: Optional[str] = None):
        self.api_key = api_key
        self.base_url = "https://api.tweetscout.io/v2"
        self.input_db = input_db
        self.output_db = output_db or "following_expanded.db"
        self.progress_db = "crawler_progress.db"
        self.headers = {
            "ApiKey": api_key,
            "Accept": "application/json",
        }
        self.session = self._create_session()
        self._setup_logging()
        self._init_databases()

    def _create_session(self) -> requests.Session:
        session = requests.Session()

        retry_strategy = Retry(
            total=3,
            backoff_factor=10,
            status_forcelist=[429, 500, 502, 503, 504, 520, 521, 522, 523,
                              524, 525, 526, 527, 528, 598, 599],
            allowed_methods=["GET", "POST", "HEAD", "PUT", "DELETE", "OPTIONS", "TRACE"],
            respect_retry_after_header=True,
            raise_on_status=False,
        )

        adapter = HTTPAdapter(
            max_retries=retry_strategy,
            pool_maxsize=10,
            pool_block=True,
        )

        session.mount("http://", adapter)
        session.mount("https://", adapter)
        # TLS verification is disabled; the matching warning filter sits at
        # the top of the module.
        session.verify = False

        # Note: requests does not honor a session-level ``timeout`` attribute,
        # so timeouts are passed per request instead (see get_user_following).
        return session
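
    # Note on retry layering: the urllib3 Retry policy above retries at the
    # transport level, while get_user_following adds an application-level
    # retry loop that also honors Retry-After on 429 responses.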

    def _setup_logging(self):
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s',
            handlers=[
                logging.FileHandler('crawler_expanded.log'),
                logging.StreamHandler(),
            ],
        )
        self.logger = logging.getLogger("FollowingCrawlerV5")

    def _init_databases(self):
        # Progress database: records which seed users have been crawled.
        self.progress_conn = sqlite3.connect(self.progress_db)
        self.progress_cursor = self.progress_conn.cursor()
        self.progress_cursor.execute('''
            CREATE TABLE IF NOT EXISTS processed_users (
                user_id TEXT PRIMARY KEY,
                processed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
        self.progress_conn.commit()

        # Output database: expanded user profiles plus following edges.
        # CREATE ... IF NOT EXISTS makes this idempotent, so no separate
        # os.path.exists() check is needed before running the script.
        self.output_conn = sqlite3.connect(self.output_db)
        self.output_cursor = self.output_conn.cursor()
        self.output_cursor.executescript('''
            CREATE TABLE IF NOT EXISTS users (
                user_id TEXT PRIMARY KEY,
                name TEXT,
                screen_name TEXT,
                description TEXT,
                followers_count INTEGER,
                friends_count INTEGER,
                tweets_count INTEGER,
                register_date TEXT,
                avatar TEXT,
                banner TEXT,
                verified BOOLEAN,
                can_dm BOOLEAN,
                last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            );

            CREATE TABLE IF NOT EXISTS following_relationships (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                user_id TEXT,
                following_of TEXT,
                collected_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                UNIQUE(user_id, following_of),
                FOREIGN KEY(user_id) REFERENCES users(user_id)
            );

            CREATE INDEX IF NOT EXISTS idx_user_id ON users(user_id);
            CREATE INDEX IF NOT EXISTS idx_followers_count ON users(followers_count);
            CREATE INDEX IF NOT EXISTS idx_following_rel ON following_relationships(user_id, following_of);
        ''')
        self.output_conn.commit()

    def get_unprocessed_users(self, batch_size: int = 100) -> List[Dict[str, Any]]:
        """Return up to ``batch_size`` seed users that have not been crawled yet."""
        attached = False
        try:
            self.logger.info("Starting to fetch unprocessed users...")

            self.progress_cursor.execute("ATTACH DATABASE ? AS input_db", (self.input_db,))
            attached = True

            # Debug check: report whether the specific seed account
            # 'vc_btxcap' has already been processed.
            self.progress_cursor.execute(
                "SELECT user_id FROM processed_users WHERE user_id IN "
                "(SELECT user_id FROM input_db.users WHERE screen_name = ?)",
                ('vc_btxcap',)
            )
            result = self.progress_cursor.fetchone()
            self.logger.info(
                "User vc_btxcap processed status: "
                f"{'Already processed' if result else 'Not processed'}"
            )

            query = '''
                SELECT DISTINCT u.user_id, u.screen_name, u.followers_count
                FROM input_db.users u
                WHERE u.user_id NOT IN (
                    SELECT user_id FROM processed_users
                )
                LIMIT ?
            '''
            self.progress_cursor.execute(query, (batch_size,))
            users = [
                {
                    "user_id": row[0],
                    "screen_name": row[1],
                    "followers_count": row[2],
                }
                for row in self.progress_cursor.fetchall()
            ]

            self.logger.info(f"Found {len(users)} unprocessed users")
            if users:
                self.logger.info(f"First unprocessed user: {users[0]['screen_name']}")

            return users

        except Exception as e:
            self.logger.error(f"Error getting unprocessed users: {str(e)}")
            return []
        finally:
            # Detach in finally so a failure mid-query cannot leave input_db
            # attached and break the next call's ATTACH.
            if attached:
                self.progress_cursor.execute("DETACH DATABASE input_db")

    def get_user_following(self, user: Dict[str, Any], retries: int = 1) -> List[Dict[str, Any]]:
        """Page through a user's following list, keeping accounts with more than 5000 followers."""
        all_following = []
        retry_count = 0
        cursor = None
        base_wait_time = 20

        while True:
            try:
                url = f"{self.base_url}/follows"
                params = {
                    "user_id": user['user_id'],
                    "limit": 20,
                }
                if cursor:
                    params['cursor'] = cursor

                # Throttle every request to stay under the API rate limit.
                self.logger.info(f"Waiting {base_wait_time} seconds before next request...")
                time.sleep(base_wait_time)

                response = self.session.get(
                    url,
                    headers=self.headers,
                    params=params,
                    timeout=(30, 60),
                )

                if response.status_code == 200:
                    data = response.json()
                    if not isinstance(data, list):
                        # An unexpected payload would otherwise repeat forever
                        # on the same cursor; treat it as an error instead.
                        raise requests.exceptions.RequestException(
                            f"Unexpected response payload: {data!r}"
                        )

                    retry_count = 0
                    cursor = response.headers.get('x-pagination-cursor')
                    all_following.extend(data)

                    self.logger.info(
                        f"Retrieved batch of {len(data)} following for {user['screen_name']}, "
                        f"total: {len(all_following)}"
                    )

                    if not cursor:
                        break

                elif response.status_code == 429:
                    retry_after = int(response.headers.get('Retry-After', 30))
                    self.logger.warning(f"Rate limited. Waiting {retry_after} seconds...")
                    time.sleep(retry_after + 5)
                    continue

                else:
                    raise requests.exceptions.RequestException(
                        f"Status {response.status_code}, {response.text}"
                    )

            except Exception as e:
                retry_count += 1
                self.logger.error(f"Error getting following for {user['screen_name']}: {str(e)}")

                if retry_count >= retries:
                    self.logger.warning(
                        f"Max retries reached for {user['screen_name']}, "
                        f"recording failure and moving on..."
                    )
                    with open('failed_users.txt', 'a') as f:
                        f.write(f"{user['screen_name']},{user['user_id']},{datetime.now()}\n")
                    return []

                # Exponential backoff between retries, capped at two minutes.
                wait_time = min(120, base_wait_time * (2 ** retry_count))
                self.logger.info(f"Waiting {wait_time} seconds before retry {retry_count}")
                time.sleep(wait_time)

        filtered_following = [
            f for f in all_following
            if isinstance(f.get('followers_count'), int) and f['followers_count'] > 5000
        ]

        self.logger.info(
            f"Retrieved {len(filtered_following)} filtered following (>5000) "
            f"for {user['screen_name']} out of {len(all_following)} total"
        )

        return filtered_following
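
    # For reference, each element of the /follows response list is assumed
    # (inferred from the fields read in save_following below) to carry at
    # least: id, name, screen_name, description, followers_count,
    # friends_count, tweets_count, register_date, avatar, banner, verified,
    # and can_dm.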

    def save_following(self, user: Dict[str, Any], following_of: str) -> None:
        """Upsert a followed account's profile and record the follow edge."""
        try:
            self.output_cursor.execute('''
                INSERT OR REPLACE INTO users
                    (user_id, name, screen_name, description,
                     followers_count, friends_count, tweets_count,
                     register_date, avatar, banner, verified, can_dm,
                     last_updated)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
            ''', (
                user.get('id'),
                user.get('name'),
                user.get('screen_name'),
                user.get('description'),
                user.get('followers_count', 0),
                user.get('friends_count', 0),
                user.get('tweets_count', 0),
                user.get('register_date'),
                user.get('avatar'),
                user.get('banner'),
                user.get('verified', False),
                user.get('can_dm', False),
            ))

            # UNIQUE(user_id, following_of) plus INSERT OR IGNORE makes
            # re-recording an existing edge a no-op.
            self.output_cursor.execute('''
                INSERT OR IGNORE INTO following_relationships (user_id, following_of)
                VALUES (?, ?)
            ''', (user.get('id'), following_of))

            self.output_conn.commit()

        except sqlite3.Error as e:
            self.logger.error(f"Database error saving following: {str(e)}")
            self.output_conn.rollback()
        except Exception as e:
            self.logger.error(f"Error saving following: {str(e)}")
            self.output_conn.rollback()

    def mark_user_processed(self, user_id: str) -> None:
        try:
            self.progress_cursor.execute('''
                INSERT OR REPLACE INTO processed_users (user_id)
                VALUES (?)
            ''', (user_id,))
            self.progress_conn.commit()
        except sqlite3.Error as e:
            self.logger.error(f"Error marking user as processed: {str(e)}")
            self.progress_conn.rollback()

    def process_user_batch(self, users: List[Dict[str, Any]]) -> None:
        for user in users:
            try:
                self.logger.info(f"Processing user: {user['screen_name']}")
                following_users = self.get_user_following(user)

                for idx, following_user in enumerate(following_users, 1):
                    self.save_following(following_user, user['screen_name'])
                    if idx % 100 == 0:
                        self.logger.info(
                            f"Processed {idx}/{len(following_users)} following "
                            f"for {user['screen_name']}"
                        )
                        time.sleep(0.5)

                self.mark_user_processed(user['user_id'])
                self.logger.info(
                    f"Completed processing {len(following_users)} following "
                    f"for {user['screen_name']}"
                )

                # Pause between seed users to spread load on the API.
                time.sleep(30)

            except Exception as e:
                self.logger.error(f"Error processing user {user['screen_name']}: {str(e)}")
                continue

    def get_progress_stats(self) -> None:
        try:
            # Total distinct seed users in the input database.
            input_conn = sqlite3.connect(self.input_db)
            input_cursor = input_conn.cursor()
            input_cursor.execute("SELECT COUNT(DISTINCT user_id) FROM users")
            total_users = input_cursor.fetchone()[0]
            input_conn.close()

            self.progress_cursor.execute("SELECT COUNT(*) FROM processed_users")
            processed_users = self.progress_cursor.fetchone()[0]

            self.output_cursor.execute("SELECT COUNT(*) FROM following_relationships")
            total_relationships = self.output_cursor.fetchone()[0]

            self.logger.info("\nProgress Statistics:")
            self.logger.info(f"Total users to process: {total_users}")
            self.logger.info(f"Processed users: {processed_users}")
            self.logger.info(f"Progress: {(processed_users / total_users) * 100:.2f}%")
            self.logger.info(f"Total following relationships collected: {total_relationships}")

        except sqlite3.Error as e:
            self.logger.error(f"Error getting progress stats: {str(e)}")

    def run(self, batch_size: int = 15):
        max_retries = 3
        retry_count = 0

        try:
            while retry_count < max_retries:
                try:
                    self.logger.info("Starting expanded following crawler")

                    # Resume support: checkpoint.txt stores the screen name of
                    # the last seed user in the most recently completed batch.
                    last_processed_user = None
                    if os.path.exists('checkpoint.txt'):
                        with open('checkpoint.txt', 'r') as f:
                            last_processed_user = f.read().strip()
                        self.logger.info(f"Resuming from user: {last_processed_user}")

                    while True:
                        users = self.get_unprocessed_users(batch_size)
                        if not users:
                            break

                        self.logger.info("Debug - All users in batch:")
                        for user in users:
                            self.logger.info(f" - {user['screen_name']}")

                        # Skip everything up to and including the checkpoint user.
                        if last_processed_user:
                            for idx, user in enumerate(users):
                                self.logger.info(f"Checking user: {user['screen_name']}")
                                if user['screen_name'] == last_processed_user:
                                    users = users[idx + 1:]
                                    last_processed_user = None
                                    self.logger.info("Found checkpoint user!")
                                    break
                            if not users:
                                # The checkpoint user was last in the batch;
                                # fetch the next batch rather than indexing
                                # into an empty list below.
                                continue

                        self.process_user_batch(users)

                        with open('checkpoint.txt', 'w') as f:
                            f.write(users[-1]['screen_name'])

                        self.get_progress_stats()
                        batch_delay = 180
                        self.logger.info(f"Waiting {batch_delay} seconds before next batch...")
                        time.sleep(batch_delay)

                    break

                except Exception as e:
                    retry_count += 1
                    self.logger.error(
                        f"Error in main run loop (attempt {retry_count}/{max_retries}): {str(e)}"
                    )
                    if retry_count < max_retries:
                        wait_time = 60 * retry_count
                        self.logger.info(f"Waiting {wait_time} seconds before retrying...")
                        time.sleep(wait_time)
                    else:
                        self.logger.error("Max retries reached, stopping crawler")
        finally:
            # Close connections only once the retry loop is fully done;
            # closing them inside the loop would break subsequent retries.
            self.progress_conn.close()
            self.output_conn.close()
            self.logger.info("Crawler finished, database connections closed")


def main():
    crawler = FollowingCrawlerV5(
        api_key="Enter your API KEY",  # placeholder: supply your TweetScout API key
        input_db="following.db",
        output_db="following_expanded.db",
    )
    crawler.run(batch_size=15)


if __name__ == "__main__":
    main()
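
# Note: the input database is expected to contain a ``users`` table with at
# least ``user_id``, ``screen_name``, and ``followers_count`` columns (see
# get_unprocessed_users above).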