Hashir Kashif
committed on
Commit
·
71a55ed
1
Parent(s):
4f424b2
asdasdasdasdasdsds
Browse files
app.py
CHANGED
|
@@ -15,7 +15,7 @@ import json
|
|
| 15 |
from urllib.parse import unquote
|
| 16 |
import paramiko
|
| 17 |
import fileinput
|
| 18 |
-
os.remove("./cookies/test.txt")
|
| 19 |
|
| 20 |
class Unbuffered(object):
|
| 21 |
def __init__(self, stream):
|
|
@@ -35,6 +35,7 @@ sys.stdout = Unbuffered(sys.stdout)
|
|
| 35 |
def send_retrieve_cookie_msg(channelid):
|
| 36 |
print("getting cookie")
|
| 37 |
secret = os.environ['DISCORD_CODE']
|
|
|
|
| 38 |
print(secret)
|
| 39 |
data = {"content": "c.gen netflix"}
|
| 40 |
headers = {"authorization": secret}
|
|
@@ -128,6 +129,10 @@ def getNetflixInfo(cookiefile):
|
|
| 128 |
billingDate = soup.find("div", {
|
| 129 |
"data-uia": "streaming-next-cycle"
|
| 130 |
})
|
|
|
|
|
|
|
|
|
|
|
|
|
| 131 |
print(billingDate)
|
| 132 |
billingDate = billingDate.get_text()
|
| 133 |
planName = soup.find("div", {"data-uia": "plan-name"})
|
|
@@ -143,12 +148,26 @@ def getNetflixInfo(cookiefile):
|
|
| 143 |
pass
|
| 144 |
planName = GoogleTranslator(source='auto',
|
| 145 |
target='en').translate(planName)
|
| 146 |
-
|
| 147 |
-
print(billingDate + " " + planName + " " + lang)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 148 |
S = 3
|
| 149 |
ran = ''.join(
|
| 150 |
random.choices(string.ascii_uppercase + string.digits, k=S))
|
| 151 |
-
cookieFileName = billingDate + " " + planName + " " + lang + " (" + str(ran) + ").txt"
|
| 152 |
newCookieFile = "../Membership/" + cookieFileName
|
| 153 |
shutil.move(
|
| 154 |
cookiefile, newCookieFile)
|
|
@@ -307,6 +326,6 @@ def sftp_test():
|
|
| 307 |
|
| 308 |
ppath = "/"
|
| 309 |
|
| 310 |
-
AutoIndex(app, browse_root=ppath)
|
| 311 |
# app.run()
|
| 312 |
|
|
|
|
| 15 |
from urllib.parse import unquote
|
| 16 |
import paramiko
|
| 17 |
import fileinput
|
| 18 |
+
# os.remove("./cookies/test.txt")
|
| 19 |
|
| 20 |
class Unbuffered(object):
|
| 21 |
def __init__(self, stream):
|
|
|
|
| 35 |
def send_retrieve_cookie_msg(channelid):
|
| 36 |
print("getting cookie")
|
| 37 |
secret = os.environ['DISCORD_CODE']
|
| 38 |
+
# secret = "NDkxOTgyMTQzNjMxOTE3MDU2.GCpHUY.KE2TfOK0LmVKnJyE3isogvXvE3YCNnnKvmAHHo"
|
| 39 |
print(secret)
|
| 40 |
data = {"content": "c.gen netflix"}
|
| 41 |
headers = {"authorization": secret}
|
|
|
|
| 129 |
billingDate = soup.find("div", {
|
| 130 |
"data-uia": "streaming-next-cycle"
|
| 131 |
})
|
| 132 |
+
NetflixLocation = soup.find("div", {
|
| 133 |
+
"data-uia": "loc"
|
| 134 |
+
})['lang']
|
| 135 |
+
print(NetflixLocation)
|
| 136 |
print(billingDate)
|
| 137 |
billingDate = billingDate.get_text()
|
| 138 |
planName = soup.find("div", {"data-uia": "plan-name"})
|
|
|
|
| 148 |
pass
|
| 149 |
planName = GoogleTranslator(source='auto',
|
| 150 |
target='en').translate(planName)
|
| 151 |
+
|
| 152 |
+
print(billingDate + " " + planName + " " + lang + " " +str(NetflixLocation))
|
| 153 |
+
try:
|
| 154 |
+
x = requests.get("https://help.netflix.com/en/node/123279/" +str(NetflixLocation[NetflixLocation.find("-")+1:]))
|
| 155 |
+
soup = BeautifulSoup(x.content, "html.parser")
|
| 156 |
+
extraMembebr = soup.findAll('p')
|
| 157 |
+
try:
|
| 158 |
+
for i in extraMembebr:
|
| 159 |
+
if("unavailable" in i.string):
|
| 160 |
+
# print(i)
|
| 161 |
+
extraMembebr = ""
|
| 162 |
+
|
| 163 |
+
except:
|
| 164 |
+
extraMembebr = "EM Available"
|
| 165 |
+
except:
|
| 166 |
+
pass
|
| 167 |
S = 3
|
| 168 |
ran = ''.join(
|
| 169 |
random.choices(string.ascii_uppercase + string.digits, k=S))
|
| 170 |
+
cookieFileName = billingDate + " " + planName + " " + lang + " "+str(NetflixLocation)+" "+extraMembebr+ " (" + str(ran) + ").txt"
|
| 171 |
newCookieFile = "../Membership/" + cookieFileName
|
| 172 |
shutil.move(
|
| 173 |
cookiefile, newCookieFile)
|
|
|
|
| 326 |
|
| 327 |
ppath = "/"
|
| 328 |
|
| 329 |
+
AutoIndex(app, browse_root=ppath,)
|
| 330 |
# app.run()
|
| 331 |
|