Datasets:

Modalities:
Text
Formats:
text
Size:
< 1K
Libraries:
Datasets
License:
naseele committed on
Commit
e3f30ae
·
1 Parent(s): 6c7aca3

Upload 01GetFollowing

Browse files
01GetFollowing/following.db ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:167e1d8e30b2bb690c8f3f114c302e65f022460a7133e2334de11db26b4c813d
3
+ size 1593344
01GetFollowing/requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ requests>=2.25.0
2
+ urllib3>=2.0.0
3
+ typing>=3.7.4
4
+ sqlite3-api>=2.0.0
01GetFollowing/result/following_expanded.db ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e31c557f312ca220f4f25fc4d179880201e8db5e52bbe765c98413388ffc044a
3
+ size 123457536
01GetFollowing/u03-5-4.py ADDED
@@ -0,0 +1,442 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ from requests.adapters import HTTPAdapter
3
+ from urllib3.util.retry import Retry
4
+ import json
5
+ import logging
6
+ import sqlite3
7
+ import time
8
+ from datetime import datetime
9
+ import urllib3
10
+ import os
11
+ from typing import List, Dict, Any, Optional, Set
12
+
13
+ # Disable SSL warnings
14
+ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
15
+
16
+ class FollowingCrawlerV5:
17
def __init__(self, api_key: str, input_db: str, output_db: str = None):
    """Configure API credentials, database paths, HTTP session, logging and DBs.

    Args:
        api_key: TweetScout API key sent in the ApiKey header.
        input_db: path to the SQLite DB holding seed users to crawl.
        output_db: path for the expanded results DB (defaults to
            "following_expanded.db" when None/empty).
    """
    self.api_key = api_key
    self.base_url = "https://api.tweetscout.io/v2"
    self.headers = {"ApiKey": api_key, "Accept": "application/json"}

    # Database locations; the progress DB name is fixed.
    self.input_db = input_db
    self.output_db = output_db or "following_expanded.db"
    self.progress_db = "crawler_progress.db"

    # Order matters: logging must exist before DB init logs anything.
    self.session = self._create_session()
    self._setup_logging()
    self._init_databases()
31
def _create_session(self) -> requests.Session:
    """Build a requests Session with aggressive automatic retry/backoff.

    Retries rate-limit (429), server-error and common CDN/proxy failure
    statuses, honoring Retry-After when present. SSL verification is
    disabled (warnings are suppressed at import time).
    """
    session = requests.Session()

    retry_strategy = Retry(
        total=3,
        backoff_factor=10,  # base wait between automatic urllib3 retries
        # Rate limiting, server errors, and common CDN/proxy failure codes.
        status_forcelist=[429, 500, 502, 503, 504, 520, 521, 522, 523, 524, 525, 526, 527, 528, 598, 599],
        allowed_methods=["GET", "POST", "HEAD", "PUT", "DELETE", "OPTIONS", "TRACE"],
        respect_retry_after_header=True,
        raise_on_status=False
    )

    adapter = HTTPAdapter(
        max_retries=retry_strategy,
        pool_maxsize=10,
        pool_block=True
    )

    session.mount("http://", adapter)
    session.mount("https://", adapter)
    session.verify = False  # endpoint cert cannot be verified in this setup

    # NOTE(fix): requests does NOT support a session-level timeout — this
    # attribute is silently ignored by Session.get/post, so the original
    # assignment (whose comment also contradicted the value) had no effect.
    # Timeouts must be passed per request; get_user_following does so.
    # Kept only as an informational (connect, read) default for callers.
    session.timeout = (60, 120)

    return session
59
+
60
def _setup_logging(self):
    """Route INFO-level logs to both a file and the console, then grab our logger."""
    log_handlers = [
        logging.FileHandler('crawler_expanded.log'),
        logging.StreamHandler(),
    ]
    logging.basicConfig(
        format='%(asctime)s - %(levelname)s - %(message)s',
        level=logging.INFO,
        handlers=log_handlers,
    )
    self.logger = logging.getLogger("FollowingCrawlerV5")
70
+
71
def _init_databases(self):
    """Open the progress and output databases, creating their schemas.

    Both schemas use CREATE ... IF NOT EXISTS, so initialization is
    idempotent and safe to run on every startup.
    """
    # Progress DB: tracks which input users have already been crawled.
    self.progress_conn = sqlite3.connect(self.progress_db)
    self.progress_cursor = self.progress_conn.cursor()
    self.progress_cursor.execute('''
        CREATE TABLE IF NOT EXISTS processed_users (
            user_id TEXT PRIMARY KEY,
            processed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    ''')
    self.progress_conn.commit()

    # Output DB. BUG FIX: the original ran the schema script only when the
    # file did not yet exist, so a pre-existing file without these tables
    # made every later query crash. The script is idempotent (IF NOT
    # EXISTS), so run it unconditionally.
    self.output_conn = sqlite3.connect(self.output_db)
    self.output_cursor = self.output_conn.cursor()
    self.output_cursor.executescript('''
        CREATE TABLE IF NOT EXISTS users (
            user_id TEXT PRIMARY KEY,
            name TEXT,
            screen_name TEXT,
            description TEXT,
            followers_count INTEGER,
            friends_count INTEGER,
            tweets_count INTEGER,
            register_date TEXT,
            avatar TEXT,
            banner TEXT,
            verified BOOLEAN,
            can_dm BOOLEAN,
            last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        );

        CREATE TABLE IF NOT EXISTS following_relationships (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            user_id TEXT,
            following_of TEXT,
            collected_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            UNIQUE(user_id, following_of),
            FOREIGN KEY(user_id) REFERENCES users(user_id)
        );

        CREATE INDEX IF NOT EXISTS idx_user_id ON users(user_id);
        CREATE INDEX IF NOT EXISTS idx_followers_count ON users(followers_count);
        CREATE INDEX IF NOT EXISTS idx_following_rel ON following_relationships(user_id, following_of);
    ''')
    self.output_conn.commit()
122
+
123
def get_unprocessed_users(self, batch_size: int = 100) -> List[Dict[str, Any]]:
    """Return up to ``batch_size`` input users not yet marked processed.

    Temporarily ATTACHes the input DB to the progress connection so a
    single query can join both. Returns [] on any error.
    """
    attached = False
    try:
        self.logger.info("Starting to fetch unprocessed users...")

        # BUG FIX: the original also opened a direct sqlite3 connection to
        # the input DB that was never used — all queries run through the
        # progress connection via ATTACH. That dead connection is removed.
        self.progress_cursor.execute("ATTACH DATABASE ? AS input_db", (self.input_db,))
        attached = True

        # Diagnostic: report the processed status of one known user.
        self.progress_cursor.execute(
            "SELECT user_id FROM processed_users WHERE user_id IN (SELECT user_id FROM input_db.users WHERE screen_name = ?)",
            ('vc_btxcap',)
        )
        result = self.progress_cursor.fetchone()
        self.logger.info(f"User vc_btxcap processed status: {'Already processed' if result else 'Not processed'}")

        # Users present in the input DB but absent from processed_users.
        query = '''
            SELECT DISTINCT u.user_id, u.screen_name, u.followers_count
            FROM input_db.users u
            WHERE u.user_id NOT IN (
                SELECT user_id FROM processed_users
            )
            LIMIT ?
        '''
        self.progress_cursor.execute(query, (batch_size,))
        users = [
            {
                "user_id": row[0],
                "screen_name": row[1],
                "followers_count": row[2]
            }
            for row in self.progress_cursor.fetchall()
        ]

        self.logger.info(f"Found {len(users)} unprocessed users")
        if users:
            self.logger.info(f"First unprocessed user: {users[0]['screen_name']}")

        return users

    except Exception as e:
        self.logger.error(f"Error getting unprocessed users: {str(e)}")
        return []
    finally:
        # BUG FIX: DETACH must happen even on failure — the original only
        # detached on the success path, so one error left input_db attached
        # and every subsequent ATTACH failed with "already in use".
        if attached:
            try:
                self.progress_cursor.execute("DETACH DATABASE input_db")
            except sqlite3.Error:
                pass  # best-effort cleanup; connection may already be unusable
174
+
175
def get_user_following(self, user: Dict[str, Any], retries: int = 1) -> List[Dict[str, Any]]:
    """Fetch the complete following list of ``user`` and filter it.

    Pages through the /follows endpoint using the x-pagination-cursor
    response header, sleeping a fixed interval before every request to
    respect rate limits. After ``retries`` consecutive failures the user
    is appended to failed_users.txt and [] is returned so the caller can
    move on. Only accounts with more than 5000 followers are returned.
    """
    all_following: List[Dict[str, Any]] = []
    retry_count = 0
    cursor = None
    base_wait_time = 20  # fixed pause before every request (rate limiting)

    while True:
        try:
            url = f"{self.base_url}/follows"
            params = {
                "user_id": user['user_id'],
                "limit": 20
            }

            if cursor:
                params['cursor'] = cursor

            self.logger.info(f"Waiting {base_wait_time} seconds before next request...")
            time.sleep(base_wait_time)

            response = self.session.get(
                url,
                headers=self.headers,
                params=params,
                timeout=(30, 60)  # (connect, read); requests needs this per call
            )

            if response.status_code == 200:
                data = response.json()
                if not isinstance(data, list):
                    # BUG FIX: the original had no else-branch here, so a
                    # 200 response carrying a non-list payload (e.g. an API
                    # error object) left cursor unchanged and looped forever
                    # with 20s sleeps. Raise so the retry/abort logic runs.
                    raise requests.exceptions.RequestException(
                        f"Unexpected 200 payload: {response.text}"
                    )

                retry_count = 0  # reset on any successful page
                cursor = response.headers.get('x-pagination-cursor')
                all_following.extend(data)

                self.logger.info(
                    f"Retrieved batch of {len(data)} following for {user['screen_name']}, "
                    f"total: {len(all_following)}"
                )

                if not cursor:
                    break  # no more pages

            elif response.status_code == 429:
                retry_after = int(response.headers.get('Retry-After', 30))
                self.logger.warning(f"Rate limited. Waiting {retry_after} seconds...")
                time.sleep(retry_after + 5)  # extra 5s safety margin
                continue

            else:
                raise requests.exceptions.RequestException(
                    f"Status {response.status_code}, {response.text}"
                )

        except Exception as e:
            retry_count += 1
            self.logger.error(f"Error getting following for {user['screen_name']}: {str(e)}")

            if retry_count >= retries:
                self.logger.warning(
                    f"Max retries reached for {user['screen_name']}, recording failure and moving on..."
                )
                # Record the failure so the user can be re-crawled later.
                with open('failed_users.txt', 'a') as f:
                    f.write(f"{user['screen_name']},{user['user_id']},{datetime.now()}\n")
                return []  # skip this user entirely

            wait_time = min(120, base_wait_time * (2 ** retry_count))  # capped exponential backoff
            self.logger.info(f"Waiting {wait_time} seconds before retry {retry_count}")
            time.sleep(wait_time)

    # Keep only accounts with a meaningful audience.
    filtered_following = [
        f for f in all_following
        if isinstance(f.get('followers_count'), int) and f.get('followers_count', 0) > 5000
    ]

    self.logger.info(
        f"Retrieved {len(filtered_following)} filtered following (>5000) "
        f"for {user['screen_name']} out of {len(all_following)} total"
    )

    return filtered_following
258
+
259
def save_following(self, user: Dict[str, Any], following_of: str) -> None:
    """Upsert one followed account and record its relationship to the source.

    Args:
        user: API payload of the followed account (keys: id, name, ...).
        following_of: screen name of the account whose following list
            this entry came from.
    """
    profile_row = (
        user.get('id'),
        user.get('name'),
        user.get('screen_name'),
        user.get('description'),
        user.get('followers_count', 0),
        user.get('friends_count', 0),
        user.get('tweets_count', 0),
        user.get('register_date'),
        user.get('avatar'),
        user.get('banner'),
        user.get('verified', False),
        user.get('can_dm', False),
    )
    try:
        # Upsert the profile row; last_updated refreshed on every write.
        self.output_cursor.execute('''
            INSERT OR REPLACE INTO users
            (user_id, name, screen_name, description,
            followers_count, friends_count, tweets_count,
            register_date, avatar, banner, verified, can_dm,
            last_updated)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
        ''', profile_row)

        # The UNIQUE(user_id, following_of) constraint makes this idempotent.
        self.output_cursor.execute('''
            INSERT OR IGNORE INTO following_relationships
            (user_id, following_of)
            VALUES (?, ?)
        ''', (user.get('id'), following_of))

        self.output_conn.commit()

    except sqlite3.Error as e:
        self.logger.error(f"Database error saving following: {str(e)}")
        self.output_conn.rollback()
    except Exception as e:
        self.logger.error(f"Error saving following: {str(e)}")
        self.output_conn.rollback()
299
+
300
def mark_user_processed(self, user_id: str) -> None:
    """Record ``user_id`` in the progress DB so it is skipped on later runs."""
    sql = '''
        INSERT OR REPLACE INTO processed_users (user_id)
        VALUES (?)
    '''
    try:
        self.progress_cursor.execute(sql, (user_id,))
        self.progress_conn.commit()
    except sqlite3.Error as e:
        self.logger.error(f"Error marking user as processed: {str(e)}")
        self.progress_conn.rollback()
310
+
311
def process_user_batch(self, users: List[Dict[str, Any]]) -> None:
    """Crawl and persist the following list of every user in the batch.

    Each user's filtered following is saved row by row, the user is then
    marked processed, and a long pause keeps us under API rate limits.
    Errors on one user are logged and do not abort the batch.
    """
    for user in users:
        try:
            screen_name = user['screen_name']
            self.logger.info(f"Processing user: {screen_name}")
            following_users = self.get_user_following(user)
            total = len(following_users)

            for idx, followed in enumerate(following_users, 1):
                self.save_following(followed, screen_name)
                if idx % 100 == 0:
                    self.logger.info(
                        f"Processed {idx}/{total} following "
                        f"for {screen_name}"
                    )
                time.sleep(0.5)  # throttle writes

            self.mark_user_processed(user['user_id'])
            self.logger.info(
                f"Completed processing {total} following "
                f"for {screen_name}"
            )

            # Long pause between users to stay well under rate limits.
            time.sleep(30)

        except Exception as e:
            self.logger.error(f"Error processing user {user['screen_name']}: {str(e)}")
            continue
338
+
339
def get_progress_stats(self) -> None:
    """Log overall crawl progress: totals, processed count and percentage."""
    try:
        # Total distinct users in the input database.
        input_conn = sqlite3.connect(self.input_db)
        try:
            input_cursor = input_conn.cursor()
            input_cursor.execute("SELECT COUNT(DISTINCT user_id) FROM users")
            total_users = input_cursor.fetchone()[0]
        finally:
            input_conn.close()

        # How many users we have already crawled.
        self.progress_cursor.execute("SELECT COUNT(*) FROM processed_users")
        processed_users = self.progress_cursor.fetchone()[0]

        # How many following relationships have been collected so far.
        self.output_cursor.execute("SELECT COUNT(*) FROM following_relationships")
        total_relationships = self.output_cursor.fetchone()[0]

        # BUG FIX: guard against an empty input DB — the original divided
        # by total_users unconditionally, and the resulting
        # ZeroDivisionError was not caught by the sqlite3.Error handler.
        progress_pct = (processed_users / total_users) * 100 if total_users else 0.0

        self.logger.info(f"\nProgress Statistics:")
        self.logger.info(f"Total users to process: {total_users}")
        self.logger.info(f"Processed users: {processed_users}")
        self.logger.info(f"Progress: {progress_pct:.2f}%")
        self.logger.info(f"Total following relationships collected: {total_relationships}")

    except sqlite3.Error as e:
        self.logger.error(f"Error getting progress stats: {str(e)}")
364
+
365
+
366
+
367
def run(self, batch_size: int = 15):
    """Main crawl loop: fetch unprocessed users in batches until none remain.

    Supports resuming via checkpoint.txt (last processed screen name) and
    retries the whole loop up to 3 times on unexpected errors. Database
    connections are closed exactly once, when the crawler finishes.
    """
    max_retries = 3
    retry_count = 0

    try:
        while retry_count < max_retries:
            try:
                self.logger.info("Starting expanded following crawler")

                # Load the last checkpoint, if any, to resume mid-batch.
                last_processed_user = None
                if os.path.exists('checkpoint.txt'):
                    with open('checkpoint.txt', 'r') as f:
                        last_processed_user = f.read().strip()
                    self.logger.info(f"Resuming from user: {last_processed_user}")

                while True:
                    users = self.get_unprocessed_users(batch_size)

                    if users:
                        self.logger.info("Debug - All users in batch:")
                        for user in users:
                            self.logger.info(f" - {user['screen_name']}")

                    if not users:
                        break  # nothing left to process

                    # If resuming, drop users up to and including the
                    # checkpointed one so we continue where we stopped.
                    if last_processed_user:
                        for idx, user in enumerate(users):
                            self.logger.info(f"Checking user: {user['screen_name']}")
                            if user['screen_name'] == last_processed_user:
                                users = users[idx + 1:]
                                last_processed_user = None
                                self.logger.info("Found checkpoint user!")
                                break

                    # BUG FIX: checkpoint slicing can empty the batch; the
                    # original then raised IndexError on users[-1]. Guard it.
                    if users:
                        self.process_user_batch(users)
                        with open('checkpoint.txt', 'w') as f:
                            f.write(users[-1]['screen_name'])

                    self.get_progress_stats()
                    batch_delay = 180  # 3 minutes between batches
                    self.logger.info(f"Waiting {batch_delay} seconds before next batch...")
                    time.sleep(batch_delay)

                break  # finished successfully; exit the retry loop

            except Exception as e:
                retry_count += 1
                self.logger.error(f"Error in main run loop (attempt {retry_count}/{max_retries}): {str(e)}")

                if retry_count < max_retries:
                    wait_time = 60 * retry_count  # 60s times the attempt number
                    self.logger.info(f"Waiting {wait_time} seconds before retrying...")
                    time.sleep(wait_time)
                else:
                    self.logger.error("Max retries reached, stopping crawler")

    finally:
        # BUG FIX: the original put this finally INSIDE the retry loop, so
        # after the first caught exception the connections were closed and
        # every retry ran against closed connections. Close exactly once,
        # after the retry loop is done.
        self.progress_conn.close()
        self.output_conn.close()
        self.logger.info("Crawler finished, database connections closed")
431
+
432
+
433
def main():
    """Entry point: build the crawler over the seed DB and start it."""
    crawler = FollowingCrawlerV5(
        input_db="following.db",
        output_db="following_expanded.db",
        api_key="Enter your API KEY",
    )
    # Process 15 users per batch.
    crawler.run(batch_size=15)


if __name__ == "__main__":
    main()