izuemon commited on
Commit
80d2524
·
verified ·
1 Parent(s): 0b72d24

Update watcher.py

Browse files
Files changed (1) hide show
  1. watcher.py +113 -77
watcher.py CHANGED
@@ -6,7 +6,7 @@ import requests
6
  from datetime import datetime, timezone
7
  from bs4 import BeautifulSoup
8
 
9
- # ===== Channel.io 設定 =====
10
  GET_URL = "https://desk-api.channel.io/desk/channels/200605/groups/519217/messages"
11
  POST_URL = GET_URL
12
 
@@ -34,13 +34,14 @@ HEADERS_POST = {
34
  }
35
 
36
  # ===== ssyoutube =====
37
- SSYOUTUBE_URL = "https://ssyoutube.online/yt-video-detail/"
 
38
 
39
  # ===== Utils =====
40
  def parse_updated_at(value):
41
  if isinstance(value, (int, float)):
42
  return datetime.fromtimestamp(value / 1000, tz=timezone.utc)
43
- elif isinstance(value, str):
44
  return datetime.fromisoformat(value.replace("Z", "+00:00"))
45
  return None
46
 
@@ -55,16 +56,31 @@ def extract_youtube_id(text):
55
  return m.group(1)
56
  return None
57
 
58
- # ===== ssyoutube HTML 解析 =====
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
59
  def fetch_download_links(youtube_url):
60
  res = requests.post(
61
- SSYOUTUBE_URL,
62
  data={"videoURL": youtube_url},
63
- timeout=30,
64
  headers={
65
  "User-Agent": "Mozilla/5.0",
66
- "Referer": "https://ssyoutube.online/",
67
- }
 
68
  )
69
  res.raise_for_status()
70
 
@@ -74,59 +90,91 @@ def fetch_download_links(youtube_url):
74
  results = []
75
  for btn in buttons:
76
  url = btn.get("data-url")
77
- quality = btn.get("data-quality") # 例: 1080p / 720p / None
78
- has_audio = btn.get("data-has-audio") # "true" / "false" / None
 
 
 
 
 
 
 
79
 
80
- if not url:
81
- continue
 
82
 
83
- results.append({
84
- "url": url,
85
- "quality": quality or "audio",
86
- "has_audio": has_audio,
87
- })
88
 
89
- return results
 
90
 
91
- def build_links(items):
92
- lines = []
93
- for item in items:
94
- url = item["url"]
95
- quality = item["quality"]
96
- has_audio = item["has_audio"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
97
 
98
- audio_label = ""
99
- if has_audio == "false":
100
- audio_label = "(映像のみ)"
101
- elif has_audio == "true":
102
- audio_label = "(音声付き)"
103
 
104
- line = f'<link type="url" value="{url}"> {quality} {audio_label}</link>'
105
- lines.append(line)
106
 
 
 
 
 
 
 
 
 
 
107
  return "\n".join(lines)
108
 
109
  def send_to_channel(text):
110
  payload = {
111
- "requestId": f"desk-web-{int(time.time() * 1000)}",
112
- "blocks": [
113
- {
114
- "type": "text",
115
- "value": text
116
- }
117
- ],
118
- "buttons": None,
119
- "form": None,
120
- "webPage": None,
121
- "files": None,
122
- "customPayload": None
123
  }
124
-
125
  res = requests.post(
126
  POST_URL,
127
  headers=HEADERS_POST,
128
  data=json.dumps(payload),
129
- timeout=30
130
  )
131
  res.raise_for_status()
132
 
@@ -134,39 +182,24 @@ def send_to_channel(text):
134
  def main():
135
  while True:
136
  try:
137
- res = requests.get(
138
- GET_URL,
139
- headers=HEADERS_GET,
140
- params=PARAMS,
141
- timeout=30,
142
- )
143
  res.raise_for_status()
144
-
145
  messages = res.json().get("messages", [])
146
- latest_msg = None
147
- latest_time = None
148
-
149
- for msg in messages:
150
- plain_text = msg.get("plainText")
151
- updated_at = msg.get("updatedAt")
152
-
153
- if not plain_text or updated_at is None:
154
- continue
155
-
156
- t = parse_updated_at(updated_at)
157
- if not t:
158
- continue
159
 
160
- if latest_time is None or t > latest_time:
161
- latest_time = t
162
- latest_msg = msg
 
 
 
 
 
163
 
164
- if not latest_msg:
165
  time.sleep(10)
166
  continue
167
 
168
- text = latest_msg["plainText"]
169
- youtube_id = extract_youtube_id(text)
170
  if not youtube_id:
171
  time.sleep(10)
172
  continue
@@ -174,13 +207,16 @@ def main():
174
  youtube_url = f"https://www.youtube.com/watch?v={youtube_id}"
175
 
176
  items = fetch_download_links(youtube_url)
177
- if not items:
178
- print("ダウンロードリンクが取得できませんでした")
179
- time.sleep(10)
180
- continue
 
 
 
181
 
182
- message_text = build_links(items)
183
- send_to_channel(message_text)
184
  print("送信完了")
185
 
186
  except Exception as e:
 
6
  from datetime import datetime, timezone
7
  from bs4 import BeautifulSoup
8
 
9
+ # ===== Channel.io =====
10
  GET_URL = "https://desk-api.channel.io/desk/channels/200605/groups/519217/messages"
11
  POST_URL = GET_URL
12
 
 
34
  }
35
 
36
  # ===== ssyoutube =====
37
+ DETAIL_URL = "https://ssyoutube.online/yt-video-detail/"
38
+ MERGE_API = "https://ssyoutube.online/wp-admin/admin-ajax.php"
39
 
40
  # ===== Utils =====
41
def parse_updated_at(value):
    """Convert a Channel.io ``updatedAt`` value to an aware UTC datetime.

    Accepts either an epoch timestamp in *milliseconds* (int/float) or an
    ISO-8601 string (a trailing "Z" is normalized to "+00:00").

    Returns None for anything that cannot be interpreted as a timestamp —
    previously a malformed string leaked a ValueError instead of honoring
    the None contract.
    """
    if isinstance(value, (int, float)):
        # Channel.io sends epoch milliseconds, not seconds.
        return datetime.fromtimestamp(value / 1000, tz=timezone.utc)
    if isinstance(value, str):
        try:
            return datetime.fromisoformat(value.replace("Z", "+00:00"))
        except ValueError:
            # Malformed string: fall through to the None contract.
            return None
    return None
47
 
 
56
  return m.group(1)
57
  return None
58
 
59
+ # ===== ssyoutube =====
60
def fetch_nonce():
    """Scrape the WordPress AJAX nonce from the ssyoutube detail page.

    Looks for a hidden ``<input name="nonce">`` first, then falls back to
    a ``"nonce":"..."`` literal embedded in inline JavaScript.  Raises
    RuntimeError when neither is present.
    """
    page = requests.get(DETAIL_URL, headers={"User-Agent": "Mozilla/5.0"}, timeout=30)
    page.raise_for_status()

    hidden = BeautifulSoup(page.text, "lxml").select_one('input[name="nonce"]')
    if hidden:
        return hidden["value"]

    # Fallback: the nonce is sometimes only present in a JS config blob.
    match = re.search(r'"nonce":"([a-f0-9]+)"', page.text)
    if match:
        return match.group(1)

    raise RuntimeError("nonce が取得できません")
74
+
75
  def fetch_download_links(youtube_url):
76
  res = requests.post(
77
+ DETAIL_URL,
78
  data={"videoURL": youtube_url},
 
79
  headers={
80
  "User-Agent": "Mozilla/5.0",
81
+ "Referer": DETAIL_URL,
82
+ },
83
+ timeout=30,
84
  )
85
  res.raise_for_status()
86
 
 
90
  results = []
91
  for btn in buttons:
92
  url = btn.get("data-url")
93
+ quality = btn.get("data-quality")
94
+ has_audio = btn.get("data-has-audio")
95
+ if url:
96
+ results.append({
97
+ "url": url,
98
+ "quality": quality or "audio",
99
+ "has_audio": has_audio,
100
+ })
101
+ return results
102
 
103
def pick_best_streams(items):
    """Pick the highest-resolution video-only stream and an audio stream.

    *items* is the list of dicts produced by fetch_download_links().
    Entries with ``has_audio == "false"`` are video-only; everything else
    is treated as carrying audio.

    Returns a ``(best_video, best_audio)`` tuple.  Raises ValueError with
    a clear message when either kind of stream is missing — previously
    this surfaced as an opaque IndexError.
    """
    videos = [item for item in items if item["has_audio"] == "false"]
    audios = [item for item in items if item["has_audio"] != "false"]

    if not videos:
        raise ValueError("no video-only stream found")
    if not audios:
        raise ValueError("no audio stream found")

    def resolution(item):
        # "1080p" -> 1080; qualities without an NNNp pattern rank lowest.
        m = re.search(r"(\d+)p", item["quality"])
        return int(m.group(1)) if m else 0

    # max() avoids sorting the whole list just to take the first element;
    # like stable descending sort, ties keep the earliest entry.
    best_video = max(videos, key=resolution)
    best_audio = audios[0]
    return best_video, best_audio
115
+
116
def start_merge(video, audio, youtube_id, quality, nonce):
    """Kick off a server-side audio/video merge on ssyoutube.

    Posts a multipart request to the WordPress AJAX endpoint and returns
    the parsed JSON response (which carries the monitor URL).
    """
    job_id = f"{youtube_id}_{quality}"
    request_data = {
        "id": job_id,
        "ttl": 3600000,
        "inputs": [
            {"url": audio["url"], "ext": "mp4"},
            {"url": video["url"], "ext": "mp4"},
        ],
        "output": {
            "ext": "mp4",
            "downloadName": f"{job_id}.mp4",
        },
        "operation": {"type": "replace_audio_in_video"},
    }

    # (None, value) tuples make requests encode these as plain multipart
    # form fields, which is what admin-ajax.php expects here.
    form = {
        "action": (None, "process_video_merge"),
        "nonce": (None, nonce),
        "request_data": (None, json.dumps(request_data)),
    }

    response = requests.post(
        MERGE_API,
        files=form,
        headers={"User-Agent": "Mozilla/5.0"},
        timeout=30,
    )
    response.raise_for_status()
    return response.json()
145
+
146
def wait_merge_done(monitor_url, poll_interval=5, max_wait=None):
    """Poll the merge monitor URL until the job finishes.

    Returns the download URL of the merged file.

    *poll_interval* is the delay between polls in seconds.  *max_wait*,
    when given, bounds the total wait in seconds and raises TimeoutError
    once exceeded; the default ``None`` keeps the original wait-forever
    behavior.

    Raises RuntimeError when the server reports a failed job — previously
    a failed merge would spin in this loop forever.
    """
    deadline = None if max_wait is None else time.monotonic() + max_wait
    while True:
        res = requests.get(monitor_url, timeout=15)
        res.raise_for_status()
        result = res.json()["result"]

        status = result["status"]
        if status == "done":
            return result["output"]["url"]
        # NOTE(review): failure status names are assumed from common API
        # conventions — confirm against the actual monitor responses.
        if status in ("error", "failed"):
            raise RuntimeError(f"merge failed: status={status}")

        if deadline is not None and time.monotonic() > deadline:
            raise TimeoutError("merge did not finish within max_wait seconds")
        time.sleep(poll_interval)
 
156
 
157
def build_links(items, merged_url, quality):
    """Render the Channel.io message body.

    One ``<link>`` tag per raw stream, followed by the merged
    (audio+video) file's link.  The merged link carries a leading newline
    so it is separated from the list by a blank line.
    """
    entries = [
        f'<link type="url" value="{item["url"]}"> {item["quality"]}</link>'
        for item in items
    ]
    entries.append(
        f'\n<link type="url" value="{merged_url}"> 🔊音声付き {quality}</link>'
    )
    return "\n".join(entries)
167
 
168
def send_to_channel(text):
    """Post *text* as a single text block to the Channel.io group."""
    # requestId only needs to be unique per message; ms-epoch suffices.
    body = json.dumps({
        "requestId": f"desk-{int(time.time()*1000)}",
        "blocks": [{"type": "text", "value": text}],
    })
    response = requests.post(POST_URL, headers=HEADERS_POST, data=body, timeout=30)
    response.raise_for_status()
180
 
 
182
  def main():
183
  while True:
184
  try:
185
+ res = requests.get(GET_URL, headers=HEADERS_GET, params=PARAMS, timeout=30)
 
 
 
 
 
186
  res.raise_for_status()
 
187
  messages = res.json().get("messages", [])
 
 
 
 
 
 
 
 
 
 
 
 
 
188
 
189
+ latest = max(
190
+ (
191
+ m for m in messages
192
+ if m.get("plainText") and m.get("updatedAt")
193
+ ),
194
+ key=lambda m: parse_updated_at(m["updatedAt"]),
195
+ default=None,
196
+ )
197
 
198
+ if not latest:
199
  time.sleep(10)
200
  continue
201
 
202
+ youtube_id = extract_youtube_id(latest["plainText"])
 
203
  if not youtube_id:
204
  time.sleep(10)
205
  continue
 
207
  youtube_url = f"https://www.youtube.com/watch?v={youtube_id}"
208
 
209
  items = fetch_download_links(youtube_url)
210
+ best_video, best_audio = pick_best_streams(items)
211
+
212
+ nonce = fetch_nonce()
213
+ merge = start_merge(best_video, best_audio, youtube_id, best_video["quality"], nonce)
214
+
215
+ monitor = merge["data"]["result"]["monitor"]["http"]
216
+ merged_url = wait_merge_done(monitor)
217
 
218
+ message = build_links(items, merged_url, best_video["quality"])
219
+ send_to_channel(message)
220
  print("送信完了")
221
 
222
  except Exception as e: