| import re |
| import requests |
| from bs4 import BeautifulSoup |
| from deep_translator import GoogleTranslator |
| import glob |
| import os |
| import shutil |
| import string |
| import random |
| from flask import Flask, abort, send_file, render_template |
| from flask_autoindex import AutoIndex |
| from threading import Thread |
| import time |
| import json |
| from urllib.parse import unquote |
# Remove the placeholder test cookie left over from a previous run.
# The file may legitimately be absent, so a missing file is not an error.
try:
    os.remove("./cookies/test.txt")
except FileNotFoundError:
    pass
def send_retrieve_cookie_msg(channelid):
    """Ask a Discord bot for Netflix cookies and download the reply.

    Posts "c.gen netflix" to a fixed channel, waits for the bot to answer,
    then fetches the newest message from *channelid* and saves every
    attachment into ./cookies/ as a Netscape-format cookie file.

    Args:
        channelid: Discord channel id polled for the latest message.

    Side effects: Discord API calls, files created under ./cookies/;
    the cwd is changed into ./cookies/ and restored before returning.
    """
    print("getting cookie")
    secret = os.environ['DISCORD_CODE']  # Discord user/bot auth token
    print(secret)
    data = {"content": "c.gen netflix"}
    headers = {"authorization": secret}
    r = requests.post(
        "https://discord.com/api/v9/channels/1186365728278990938/messages",
        data=data,
        headers=headers)
    # Give the bot time to generate and post the cookie attachment.
    time.sleep(10)
    os.chdir("./cookies/")
    if r.status_code != 429:  # 429 = rate limited by Discord
        headers = {'authorization': secret}
        r = requests.get(
            # NOTE: the URL previously started with a stray space; requests
            # happened to tolerate it, but it was a latent bug.
            f'https://discord.com/api/v9/channels/{channelid}/messages?limit=1',
            headers=headers)
        jsonn = json.loads(r.text)

        for value in jsonn:
            # Random 6-char suffix so repeated downloads never collide.
            ran = ''.join(
                random.choices(string.ascii_uppercase + string.digits, k=6))
            try:
                attachmentUrl = value['attachments'][0]['url']
                filename = str(ran) + ".txt"
                response = requests.get(attachmentUrl)
                # Save the raw download, then rewrite it with the Netscape
                # header prepended and blank lines stripped.
                with open("old" + filename, "wb") as raw:
                    raw.write(response.content)
                first_line = True
                with open("old" + filename, 'r') as src, \
                        open(filename, 'w') as dst:
                    for line in src:
                        if line.strip():
                            if first_line:
                                dst.write("# Netscape HTTP Cookie File\n")
                                first_line = False
                            dst.write(line)
                os.remove("old" + filename)
            except Exception as e:
                # Message may carry no attachment; log and keep going.
                print(e)
    else:
        print("cooldown")

    os.chdir("../")
|
|
|
|
def parseCookieFile(cookiefile):
    """Parse a Netscape-format cookies.txt file.

    Args:
        cookiefile: path to a tab-separated Netscape cookie file.

    Returns:
        A dict of cookie name -> value, usable as the ``cookies=``
        argument to ``requests``; or the sentinel string "fail" when any
        non-comment line is malformed (callers compare against "fail").
    """
    cookies = {}
    failed = False
    with open(cookiefile, 'r') as fp:
        for line in fp:
            # Skip comment lines (e.g. the "# Netscape HTTP Cookie File"
            # header) and blank lines, which are not cookie records.
            if line.startswith('#') or not line.strip():
                continue
            fields = line.strip().split('\t')
            try:
                # Netscape columns: domain, include-subdomains, path,
                # secure, expiry, name, value -> indices 5 and 6.
                cookies[fields[5]] = fields[6]
            except IndexError:
                print("Invalid cookie")
                failed = True
    return "fail" if failed else cookies
|
|
|
|
def getNetflixInfo(cookiefile):
    """Check whether a cookie file grants a logged-in Netflix session.

    Loads cookies from *cookiefile* and requests the Netflix billing
    page, then sorts the file:
      - unparseable cookies, or a redirect to the login page -> deleted
      - active membership -> moved to ../Membership/, named after the
        next billing date, plan name and detected page language
      - logged in but no plan/billing info found -> ../NoMembership/
    Paths are relative to the cwd (callers chdir into ./cookies/ first).

    Args:
        cookiefile: path to a Netscape-format cookie file.
    """
    cookies = parseCookieFile(cookiefile)
    if cookies == "fail":
        # Malformed cookie file -- nothing worth checking.
        os.remove(cookiefile)
        print("cookie removed")
        return

    r = requests.get("https://www.netflix.com/BillingActivity",
                     cookies=cookies)
    print(r.url)
    # Debug dump of the raw response body (bytes repr) for inspection.
    with open("netflixWebCode.txt", "w") as f:
        f.write(str(r.content))

    if "login" in r.url:
        # Netflix redirected us to the login page: the cookies are dead.
        print("Login Page")
        os.remove(cookiefile)
        return

    soup = BeautifulSoup(r.content, "html.parser")
    try:
        print(r.url)
        billingDate = soup.find("div", {"data-uia": "streaming-next-cycle"})
        print(billingDate)
        billingDate = billingDate.get_text()
        planName = soup.find("div", {"data-uia": "plan-name"})
        print(planName)
        planName = planName.get_text()
        billingDate = GoogleTranslator(source='auto',
                                       target='en').translate(billingDate)
        # Tag accounts whose billing page is already rendered in English.
        try:
            soup.find("html", {"lang": 'en'}).get_text()
            lang = "English"
        except AttributeError:
            # find() returned None -> page language is not English.
            lang = ""
        planName = GoogleTranslator(source='auto',
                                    target='en').translate(planName)

        print(billingDate + " " + planName + " " + lang)
        # Short random suffix keeps same-plan filenames unique.
        ran = ''.join(
            random.choices(string.ascii_uppercase + string.digits, k=3))
        shutil.move(
            cookiefile, "../Membership/" + billingDate + " " + planName +
            " " + lang + " (" + str(ran) + ").txt")
    except Exception as e:
        # Plan/billing nodes missing: logged in but no active membership.
        print(e)
        print("\n Moving in noMember")
        ran = ''.join(
            random.choices(string.ascii_uppercase + string.digits, k=10))
        shutil.move(cookiefile, '../NoMembership/NoMember' + str(ran) + ".txt")
|
|
|
|
def mainTask():
    """Run the full pipeline: pull cookies from Discord, then check each one."""
    print("running cookie retriever")
    send_retrieve_cookie_msg(1191381755731644418)
    time.sleep(5)
    print("running netflix checker")
    os.chdir('./cookies/')
    for cookie_path in glob.glob("*.txt"):
        print(cookie_path)
        getNetflixInfo(cookie_path)
    os.chdir('../')
|
|
def testTask():
    """Run the Netflix check against a fixed local test cookie file."""
    getNetflixInfo("./cookietest/cookie.txt")
|
|
# Flask application serving the file browser and the task-trigger routes.
app = Flask(__name__)
|
|
|
|
@app.route('/', defaults={'req_path': ''})
@app.route('/<path:req_path>')
def dir_listing(req_path):
    """Serve a file, or list a directory, relative to the app's cwd.

    Args:
        req_path: URL path from the route (already percent-decoded by
            Flask; unquoted again here to preserve the original behavior
            for doubly-encoded links).

    Returns:
        The file contents for a file path, a rendered directory listing
        for a directory path; aborts with 404 when the path is missing
        and 403 on a path-traversal attempt.
    """
    BASE_DIR = './'

    abs_path = os.path.join(BASE_DIR, req_path)
    abs_path = unquote(abs_path)

    # NOTE(security): req_path is untrusted; without this guard a request
    # like /../../etc/passwd escapes BASE_DIR via send_file below.
    base = os.path.realpath(BASE_DIR)
    target = os.path.realpath(abs_path)
    if os.path.commonpath([base, target]) != base:
        abort(403)

    if os.path.isfile(abs_path):
        return send_file(abs_path)

    # Missing path: 404 instead of echoing the resolved path back.
    if not os.path.exists(abs_path):
        abort(404)

    files = os.listdir(abs_path)
    return render_template('files.html', files=files)
|
|
|
|
@app.route('/alive')
def alive():
    """Health-check endpoint for keep-alive pingers."""
    return "Keep Alive"
|
|
|
|
@app.route('/main')
def main():
    """Kick off the full cookie pipeline on a background thread."""
    worker = Thread(target=mainTask)
    worker.start()
    return 'Hello from Flask!'
@app.route('/test')
def test():
    """Run the single-cookie test task on a background thread."""
    worker = Thread(target=testTask)
    worker.start()
    return 'Hello from Flask! test'
|
|
|
|
# Root directory exposed by flask_autoindex's built-in file browser.
# NOTE(review): "/" is the filesystem root, not the app directory --
# this exposes the entire filesystem; confirm that is intended.
ppath = "/"


AutoIndex(app, browse_root=ppath)
| |
|
|
|
|