izuemon committed on
Commit
fe97e6f
·
verified ·
1 Parent(s): 490219d

Update watcher.py

Browse files
Files changed (1) hide show
  1. watcher.py +70 -91
watcher.py CHANGED
@@ -35,6 +35,8 @@ HEADERS_POST = {
35
 
36
  # ===== ssyoutube =====
37
  SSYOUTUBE_URL = "https://ssyoutube.online/yt-video-detail/"
 
 
38
 
39
  # ===== Utils =====
40
  def parse_updated_at(value):
@@ -55,8 +57,8 @@ def extract_youtube_id(text):
55
  return m.group(1)
56
  return None
57
 
58
- # ===== ssyoutube HTML解析 & nonce取得 =====
59
- def fetch_download_links_and_nonce(youtube_url):
60
  res = requests.post(
61
  SSYOUTUBE_URL,
62
  data={"videoURL": youtube_url},
@@ -69,95 +71,25 @@ def fetch_download_links_and_nonce(youtube_url):
69
  res.raise_for_status()
70
 
71
  soup = BeautifulSoup(res.text, "lxml")
72
-
73
- # ★ wp nonce 取得
74
- nonce_el = soup.select_one('meta[name="wp-nonce"], input[name="nonce"]')
75
- nonce = nonce_el["content"] if nonce_el and nonce_el.has_attr("content") else nonce_el["value"]
76
-
77
  buttons = soup.select("button[data-url]")
78
- items = []
79
 
 
80
  for btn in buttons:
81
  url = btn.get("data-url")
82
- quality = btn.get("data-quality")
83
- has_audio = btn.get("data-has-audio")
84
 
85
  if not url:
86
  continue
87
 
88
- items.append({
89
  "url": url,
90
- "quality": quality,
91
  "has_audio": has_audio,
92
  })
93
 
94
- return items, nonce
95
-
96
def pick_best_video_and_audio(items):
    """Pick the highest-resolution video-only stream and the first audio stream.

    Args:
        items: list of dicts with keys "url", "quality" (e.g. "1080p",
            "audio", or None) and "has_audio" ("true"/"false"/None), as
            produced by the ssyoutube page scraper.

    Returns:
        (best_video, best_audio) item dicts, or (None, None) when either
        kind of stream is missing.
    """
    video_candidates = []
    audio_candidates = []

    for item in items:
        if item["has_audio"] == "false" and item["quality"]:
            # Extract the numeric part of labels like "1080p". Skip labels
            # with no digits instead of crashing on int("").
            digits = re.sub(r"\D", "", item["quality"])
            if digits:
                video_candidates.append((int(digits), item))
        if item["quality"] in (None, "audio"):
            audio_candidates.append(item)

    if not video_candidates or not audio_candidates:
        return None, None

    # Highest resolution wins; max() avoids sorting the whole list and, like
    # the stable reverse sort it replaces, returns the first of equal maxima.
    best_video = max(video_candidates, key=lambda pair: pair[0])[1]
    best_audio = audio_candidates[0]

    return best_video, best_audio
114
-
115
def merge_video_audio(video, audio, nonce, youtube_id, quality):
    """Submit a merge job to ssyoutube's admin-ajax endpoint.

    Muxes the audio-only stream into the video-only stream server-side and
    returns the endpoint's JSON response (which carries a monitor URL).

    Args:
        video: stream item dict with a "url" key (video-only stream).
        audio: stream item dict with a "url" key (audio-only stream).
        nonce: WordPress ajax nonce scraped from the detail page.
        youtube_id: YouTube video id, used to name the job and output file.
        quality: quality label (e.g. "1080p"), also part of the job name.
    """
    endpoint = "https://ssyoutube.online/wp-admin/admin-ajax.php"
    job_name = f"{youtube_id}_{quality}"
    chunk_download = {"type": "header", "size": 52428800, "concurrency": 3}

    job_spec = {
        "id": job_name,
        "ttl": 3600000,
        "inputs": [
            {"url": audio["url"], "ext": "mp4", "chunkDownload": dict(chunk_download)},
            {"url": video["url"], "ext": "mp4", "chunkDownload": dict(chunk_download)},
        ],
        "output": {
            "ext": "mp4",
            "downloadName": f"{job_name}.mp4",
            "chunkUpload": {"size": 104857600, "concurrency": 3},
        },
        "operation": {"type": "replace_audio_in_video"},
    }

    # (filename=None, value) tuples force a multipart/form-data request.
    form = {
        "action": (None, "process_video_merge"),
        "nonce": (None, nonce),
        "request_data": (None, json.dumps(job_spec)),
    }

    response = requests.post(endpoint, files=form, timeout=30)
    response.raise_for_status()
    return response.json()
150
 
151
def wait_for_merge_done(monitor_http, poll_interval=5, max_wait=None):
    """Poll the merge monitor URL until the server reports the job done.

    Args:
        monitor_http: monitor endpoint URL returned by the merge API.
        poll_interval: seconds to sleep between polls (default 5, as before).
        max_wait: optional overall deadline in seconds; None (the default)
            polls forever, preserving the original behavior.

    Returns:
        URL of the merged output file.

    Raises:
        RuntimeError: when the server reports a failed merge, or the
            deadline expires before the job completes.
        requests.HTTPError: on a non-2xx monitor response.
    """
    deadline = None if max_wait is None else time.time() + max_wait
    while True:
        res = requests.get(monitor_http, timeout=15)
        res.raise_for_status()
        result = res.json().get("result", {})
        status = result.get("status")
        if status == "done":
            return result["output"]["url"]
        # Fail fast on a dead job instead of polling it forever.
        if status in ("error", "failed"):
            raise RuntimeError(f"merge failed: status={status}")
        if deadline is not None and time.time() >= deadline:
            raise RuntimeError("merge did not finish before max_wait")
        time.sleep(poll_interval)
159
-
160
- # ===== Build & Send =====
161
  def build_links(items):
162
  lines = []
163
  for item in items:
@@ -179,7 +111,12 @@ def build_links(items):
179
  def send_to_channel(text):
180
  payload = {
181
  "requestId": f"desk-web-{int(time.time() * 1000)}",
182
- "blocks": [{"type": "text", "value": text}],
 
 
 
 
 
183
  "buttons": None,
184
  "form": None,
185
  "webPage": None,
@@ -195,11 +132,53 @@ def send_to_channel(text):
195
  )
196
  res.raise_for_status()
197
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
198
  # ===== Main =====
199
  def main():
200
  while True:
201
  try:
202
- res = requests.get(GET_URL, headers=HEADERS_GET, params=PARAMS, timeout=30)
 
 
 
 
 
203
  res.raise_for_status()
204
 
205
  messages = res.json().get("messages", [])
@@ -209,6 +188,7 @@ def main():
209
  for msg in messages:
210
  plain_text = msg.get("plainText")
211
  updated_at = msg.get("updatedAt")
 
212
  if not plain_text or updated_at is None:
213
  continue
214
 
@@ -232,25 +212,24 @@ def main():
232
 
233
  youtube_url = f"https://www.youtube.com/watch?v={youtube_id}"
234
 
235
- items, nonce = fetch_download_links_and_nonce(youtube_url)
236
  if not items:
237
  print("ダウンロードリンクが取得できませんでした")
238
  time.sleep(10)
239
  continue
240
 
241
- video, audio = pick_best_video_and_audio(items)
242
- if not video or not audio:
243
- print("動画または音声が取得できませんでした")
244
- time.sleep(10)
245
- continue
246
 
247
- merge_res = merge_video_audio(video, audio, nonce, youtube_id, video["quality"])
248
- monitor_http = merge_res["data"]["result"]["monitor"]["http"]
249
- final_video_url = wait_for_merge_done(monitor_http)
 
 
 
250
 
251
  message_text = build_links(items)
252
- message_text += f'\n<link type="url" value="{final_video_url}">🎬 音声付き {video["quality"]}</link>'
253
-
254
  send_to_channel(message_text)
255
  print("送信完了")
256
 
 
35
 
36
# ===== ssyoutube =====
SSYOUTUBE_URL = "https://ssyoutube.online/yt-video-detail/"
SSYOUTUBE_MERGE_URL = "https://ssyoutube.online/wp-admin/admin-ajax.php"
SSYOUTUBE_NONCE = "82b3e4b0cd"  # WordPress ajax nonce; change as needed
40
 
41
  # ===== Utils =====
42
  def parse_updated_at(value):
 
57
  return m.group(1)
58
  return None
59
 
60
+ # ===== ssyoutube HTML 解析 =====
61
+ def fetch_download_links(youtube_url):
62
  res = requests.post(
63
  SSYOUTUBE_URL,
64
  data={"videoURL": youtube_url},
 
71
  res.raise_for_status()
72
 
73
  soup = BeautifulSoup(res.text, "lxml")
 
 
 
 
 
74
  buttons = soup.select("button[data-url]")
 
75
 
76
+ results = []
77
  for btn in buttons:
78
  url = btn.get("data-url")
79
+ quality = btn.get("data-quality") # 例: 1080p / 720p / None
80
+ has_audio = btn.get("data-has-audio") # "true" / "false" / None
81
 
82
  if not url:
83
  continue
84
 
85
+ results.append({
86
  "url": url,
87
+ "quality": quality or "audio",
88
  "has_audio": has_audio,
89
  })
90
 
91
+ return results
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
92
 
 
 
 
 
 
 
 
 
 
 
93
  def build_links(items):
94
  lines = []
95
  for item in items:
 
111
  def send_to_channel(text):
112
  payload = {
113
  "requestId": f"desk-web-{int(time.time() * 1000)}",
114
+ "blocks": [
115
+ {
116
+ "type": "text",
117
+ "value": text
118
+ }
119
+ ],
120
  "buttons": None,
121
  "form": None,
122
  "webPage": None,
 
132
  )
133
  res.raise_for_status()
134
 
135
# ===== Video + audio merge =====
def merge_video_audio(video_url, audio_url, output_name):
    """Submit a merge job to ssyoutube's admin-ajax endpoint.

    Asks the server to replace the audio track of the video-only stream with
    the audio-only stream, then returns the monitor URL used to poll for the
    merged file.

    Args:
        video_url: direct URL of the video-only stream.
        audio_url: direct URL of the audio-only stream.
        output_name: base name for the job id and downloaded file.

    Raises:
        RuntimeError: when the API response carries no monitor URL.
        requests.HTTPError: on a non-2xx response.
    """
    job_spec = {
        "id": f"{output_name}_merge",
        "ttl": 3600000,
        "inputs": [
            {"url": audio_url, "ext": "mp4"},
            {"url": video_url, "ext": "mp4"},
        ],
        "output": {
            "ext": "mp4",
            "downloadName": f"{output_name}.mp4",
            "chunkUpload": {"size": 104857600, "concurrency": 3},
        },
        "operation": {"type": "replace_audio_in_video"},
    }
    form = {
        "action": "process_video_merge",
        "nonce": SSYOUTUBE_NONCE,
        "request_data": json.dumps(job_spec),
    }
    browser_headers = {
        "User-Agent": "Mozilla/5.0",
        "Referer": "https://ssyoutube.online/yt-video-detail/",
    }

    response = requests.post(
        SSYOUTUBE_MERGE_URL,
        data=form,
        headers=browser_headers,
        timeout=30,
    )
    response.raise_for_status()

    result = response.json().get("data", {}).get("result", {})
    monitor_url = result.get("monitor", {}).get("http")
    if not monitor_url:
        raise RuntimeError("動画結合 API が返す monitor URL が取得できませんでした")
    return monitor_url
171
+
172
  # ===== Main =====
173
  def main():
174
  while True:
175
  try:
176
+ res = requests.get(
177
+ GET_URL,
178
+ headers=HEADERS_GET,
179
+ params=PARAMS,
180
+ timeout=30,
181
+ )
182
  res.raise_for_status()
183
 
184
  messages = res.json().get("messages", [])
 
188
  for msg in messages:
189
  plain_text = msg.get("plainText")
190
  updated_at = msg.get("updatedAt")
191
+
192
  if not plain_text or updated_at is None:
193
  continue
194
 
 
212
 
213
  youtube_url = f"https://www.youtube.com/watch?v={youtube_id}"
214
 
215
+ items = fetch_download_links(youtube_url)
216
  if not items:
217
  print("ダウンロードリンクが取得できませんでした")
218
  time.sleep(10)
219
  continue
220
 
221
+ # 映像のみと音声のみを取得
222
+ video_items = [i for i in items if i["has_audio"] == "false"]
223
+ audio_items = [i for i in items if i["has_audio"] == "true" and i["quality"] == "audio"]
 
 
224
 
225
+ if video_items and audio_items:
226
+ # 最も高画質な動画
227
+ video_item = sorted(video_items, key=lambda x: int(re.sub(r"\D", "", x["quality"] or "0")), reverse=True)[0]
228
+ audio_item = audio_items[0]
229
+ merged_url = merge_video_audio(video_item["url"], audio_item["url"], f"yt_{youtube_id}")
230
+ items.append({"url": merged_url, "quality": "merged", "has_audio": "true"})
231
 
232
  message_text = build_links(items)
 
 
233
  send_to_channel(message_text)
234
  print("送信完了")
235