Spaces: Sleeping
from base64 import b64decode
import time

from bs4 import BeautifulSoup
from Crypto.Cipher import AES
from Crypto.Util.Padding import unpad
from flask import Flask, render_template, redirect, url_for
import requests
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from webdriver_manager.chrome import ChromeDriverManager
| app = Flask(__name__, static_url_path='/static', static_folder='static') | |
| def create_driver(): | |
| """Create and configure Chrome WebDriver""" | |
| chrome_options = Options() | |
| chrome_options.add_argument("--headless") # Run in background | |
| chrome_options.add_argument("--no-sandbox") | |
| chrome_options.add_argument("--disable-dev-shm-usage") | |
| chrome_options.add_argument("--disable-gpu") | |
| chrome_options.add_argument("--window-size=1920,1080") | |
| chrome_options.add_argument("--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36") | |
| service = Service(ChromeDriverManager().install()) | |
| driver = webdriver.Chrome(service=service, options=chrome_options) | |
| return driver | |
| def homeFetch(): | |
| driver = create_driver() | |
| try: | |
| driver.get('https://www.shemaroome.com/') | |
| # Wait for the page to load | |
| WebDriverWait(driver, 10).until( | |
| EC.presence_of_element_located((By.CLASS_NAME, "masthead-card")) | |
| ) | |
| # Get page source and parse with BeautifulSoup | |
| page_source = driver.page_source | |
| soup = BeautifulSoup(page_source, features="html5lib") | |
| # fetches all slider images | |
| slider = soup.find_all("div", {"class": "masthead-card"}) | |
| sliderImages = [] | |
| for element in slider: | |
| tempImage = element.find("img") | |
| if tempImage: | |
| sliderImages.append(tempImage['src']) | |
| # fetches all catagories with images | |
| catagory = soup.find_all( | |
| "div", {"class": "float-left w-100 slick-container slick-gap"}) | |
| catagoryObject = [] | |
| for element in catagory: | |
| tempTitle = element.find("h2") | |
| if tempTitle != None: | |
| tempTitle = str(tempTitle).split(">")[1].split("<")[0] | |
| tempImages = element.find_all("a") | |
| tempImagesArr = [] | |
| for image in tempImages: | |
| try: | |
| imgLink = image.find("img")["src"] | |
| except: | |
| imgLink = "" | |
| tempImagesArr.append([image["href"], imgLink]) | |
| catagoryObject.append([tempTitle, tempImagesArr]) | |
| return sliderImages, catagoryObject | |
| finally: | |
| driver.quit() | |
| def movieDetailFetch(title): | |
| driver = create_driver() | |
| try: | |
| driver.get('https://www.shemaroome.com/' + title) | |
| # Wait for the main content to load | |
| WebDriverWait(driver, 10).until( | |
| EC.presence_of_element_located((By.CLASS_NAME, "main-content")) | |
| ) | |
| page_source = driver.page_source | |
| soup = BeautifulSoup(page_source, features="html5lib") | |
| pathList = [] | |
| pathsContainer = soup.find("section", {"class": "main-content"}) | |
| if pathsContainer and pathsContainer.find("ul"): | |
| paths = pathsContainer.find("ul").find_all("li") | |
| for path in paths: | |
| if path.find("a"): | |
| pathList.append([path.find("a")["href"], path.text.strip()]) | |
| title_element = soup.find( | |
| "h1", { | |
| "class": | |
| "float-left w-100 app-color1 font-black margin-bottom-10 section-title2" | |
| }) | |
| title = title_element.text if title_element else "" | |
| catagoriesArr = [] | |
| catagories = soup.find_all( | |
| "li", {"class": "float-left font-regular app-color5 app-color1"}) | |
| for catagory in catagories: | |
| catagoriesArr.append(catagory.text.strip()) | |
| movieDataArr = [] | |
| Synopsis = soup.find_all( | |
| "p", {"class": "float-left w-100 app-color1 font-regular"}) | |
| for data in Synopsis: | |
| movieDataArr.append(data.text.strip()) | |
| youMayLikeArr = [] | |
| youMayLikeContainer = soup.find( | |
| "div", {"class": "float-left w-100 app-slick-slider-container"}) | |
| if youMayLikeContainer: | |
| youMayLike = youMayLikeContainer.find_all("a") | |
| for data in youMayLike: | |
| img_element = data.find("img") | |
| if img_element: | |
| youMayLikeArr.append([data["href"], img_element["src"]]) | |
| return { | |
| "pathList": pathList, | |
| "title": title, | |
| "catagoriesArr": catagoriesArr, | |
| "movieDataArr": movieDataArr, | |
| "youMayLikeArr": youMayLikeArr | |
| } | |
| finally: | |
| driver.quit() | |
| def showDetailFetch(title): | |
| driver = create_driver() | |
| try: | |
| driver.get('https://www.shemaroome.com/' + title) | |
| # Wait for the main content to load | |
| WebDriverWait(driver, 10).until( | |
| EC.presence_of_element_located((By.CLASS_NAME, "main-content")) | |
| ) | |
| page_source = driver.page_source | |
| soup = BeautifulSoup(page_source, features="html5lib") | |
| pathsContainer = soup.find("section", {"class": "main-content"}) | |
| pathList = [] | |
| if pathsContainer and pathsContainer.find("ul"): | |
| paths = pathsContainer.find("ul").find_all("li") | |
| for path in paths: | |
| if path.find("a"): | |
| pathList.append([path.find("a")["href"], path.text.strip()]) | |
| title_element = soup.find( | |
| "h1", { | |
| "class": | |
| "float-left w-100 app-color1 font-black margin-bottom-10 section-title2" | |
| }) | |
| title = title_element.text.strip() if title_element else "" | |
| catagories = soup.find_all("li", | |
| {"class": "float-left font-regular app-color5"}) | |
| catagoriesArr = [] | |
| for catagory in catagories: | |
| catagoriesArr.append(catagory.text.strip()) | |
| Synopsis = soup.find_all( | |
| "p", {"class": "float-left w-100 app-color1 font-regular"}) | |
| movieDataArr = [] | |
| for data in Synopsis: | |
| movieDataArr.append(data.text.strip()) | |
| episodesArr = [] | |
| episodeContainers = soup.find_all( | |
| "div", {"class": "float-left w-100 app-slick-slider-container"}) | |
| if len(episodeContainers) > 0: | |
| episodes = episodeContainers[0].find_all("a") | |
| for episode in episodes: | |
| img_element = episode.find("img") | |
| if img_element: | |
| episodesArr.append([episode["href"], img_element["src"]]) | |
| youMayLikeArr = [] | |
| if len(episodeContainers) > 1: | |
| youMayLike = episodeContainers[1].find_all("a") | |
| for data in youMayLike: | |
| img_element = data.find("img") | |
| if img_element: | |
| youMayLikeArr.append([data["href"], img_element["src"]]) | |
| poster = "" | |
| poster_element = soup.find( | |
| "div", { | |
| "class": "player_section w-100 embed-responsive embed-responsive-16by9" | |
| }) | |
| if poster_element: | |
| img_element = poster_element.find("img") | |
| if img_element: | |
| poster = img_element["src"] | |
| return { | |
| "pathList": pathList, | |
| "title": title, | |
| "catagoriesArr": catagoriesArr, | |
| "movieDataArr": movieDataArr, | |
| "episodesArr": episodesArr, | |
| "youMayLikeArr": youMayLikeArr, | |
| "poster": poster | |
| } | |
| finally: | |
| driver.quit() | |
| def decryptLink(encrypted, key, type): | |
| key = b64decode(key) | |
| iv = b'0000000000000000' | |
| ct = b64decode(encrypted) | |
| cipher = AES.new(key, AES.MODE_CBC, iv) | |
| pt = unpad(cipher.decrypt(ct), AES.block_size) | |
| link = pt.decode() | |
| tempUrl = "https://d1fcqrzxghru70.cloudfront.net/" + \ | |
| link.split("cloudfront.net/")[1] | |
| response = requests.request("GET", tempUrl) | |
| tempArr = response.text.split("RESOLUTION=") | |
| tempArr.pop(0) | |
| tempUrl2 = '/'.join(tempUrl.split("/")[:-1]) | |
| bestResolution = 0 | |
| for i in range(len(tempArr)): | |
| if int(tempArr[i].split("x")[0]) > int( | |
| tempArr[bestResolution].split("x")[0]): | |
| bestResolution = i | |
| resolutionLink = tempUrl2 + "/" + tempArr[bestResolution].split("\n")[-2] | |
| return resolutionLink | |
| def stremKeyAPI(catalog_id, content_id, item_category, content_definition): | |
| url = "https://www.shemaroome.com/users/user_all_lists" | |
| payload = 'catalog_id='+catalog_id+'&content_id='+content_id + \ | |
| '&category='+item_category+'&content_def='+content_definition | |
| response = requests.request("POST", url, data=payload) | |
| try: | |
| return { | |
| "streamKey": response.json()['stream_key'], | |
| "key": response.json()['key'], | |
| "newPlayUrl": response.json()['new_play_url'], | |
| "ios_key": response.json()['ios_key'], | |
| "ios_play_url": response.json()['ios_play_url'], | |
| "subtitle": response.json()['subtitle'] | |
| } | |
| except: | |
| return {"error": "There's an error in data."} | |
| def pageLoderAPI(title): | |
| driver = create_driver() | |
| try: | |
| driver.get("https://www.shemaroome.com/" + title) | |
| # Wait for the page to load | |
| WebDriverWait(driver, 10).until( | |
| EC.presence_of_element_located((By.ID, "catalog_id")) | |
| ) | |
| page_source = driver.page_source | |
| soup = BeautifulSoup(page_source, features="html5lib") | |
| catalog_id_elem = soup.find("input", {"id": "catalog_id"}) | |
| content_id_elem = soup.find("input", {"id": "content_id"}) | |
| item_category_elem = soup.find("input", {"id": "item_category"}) | |
| content_definition_elem = soup.find("input", {"id": "content_definition"}) | |
| if not all([catalog_id_elem, content_id_elem, item_category_elem, content_definition_elem]): | |
| return {"error": "Required elements not found on page."} | |
| return { | |
| "catalog_id": catalog_id_elem['value'], | |
| "content_id": content_id_elem['value'], | |
| "item_category": item_category_elem['value'], | |
| "content_definition": content_definition_elem['value'] | |
| } | |
| except Exception as e: | |
| return {"error": f"There's an error in URL: {str(e)}"} | |
| finally: | |
| driver.quit() | |
| def home(): | |
| sliderImages, catagoryObject = homeFetch() | |
| return render_template('home.html', | |
| sliderImages=sliderImages, | |
| catagoryObject=catagoryObject) | |
| def hello(): | |
| return "Hello world from Shemaroome!" | |
| def movieDetail(title): | |
| try: | |
| dataObj = movieDetailFetch("movies/" + title) | |
| contentObj = pageLoderAPI("movies/" + title) | |
| keyData = stremKeyAPI(contentObj["catalog_id"], contentObj["content_id"], | |
| contentObj["item_category"], "AVOD") | |
| movieUrl = decryptLink(keyData["ios_play_url"], keyData["ios_key"], "movie") | |
| return render_template('detailMovie.html', | |
| dataObj=dataObj, | |
| movieUrl=movieUrl) | |
| except: | |
| return redirect(url_for('home')) | |
| def detailsGujaratiPlays(title): | |
| try: | |
| dataObj = movieDetailFetch("gujarati-plays/" + title) | |
| contentObj = pageLoderAPI("gujarati-plays/" + title) | |
| keyData = stremKeyAPI(contentObj["catalog_id"], contentObj["content_id"], | |
| contentObj["item_category"], "AVOD") | |
| movieUrl = decryptLink(keyData["ios_play_url"], keyData["ios_key"], "movie") | |
| return render_template('detailsGujaratiPlays.html', | |
| dataObj=dataObj, | |
| movieUrl=movieUrl) | |
| except: | |
| return redirect(url_for('home')) | |
| def detailShowHome(title): | |
| try: | |
| dataObj = showDetailFetch("shows/" + title) | |
| return render_template('detailShowHome.html', dataObj=dataObj) | |
| except: | |
| return redirect(url_for('home')) | |
| def detailShowEpisode(title, episode): | |
| try: | |
| dataObj = showDetailFetch("shows/" + title + "/" + episode) | |
| contentObj = pageLoderAPI("shows/" + title + "/" + episode) | |
| keyData = stremKeyAPI(contentObj["catalog_id"], contentObj["content_id"], | |
| contentObj["item_category"], "AVOD") | |
| movieUrl = decryptLink(keyData["ios_play_url"], keyData["ios_key"], "show") | |
| return render_template('detailShowEpisode.html', | |
| dataObj=dataObj, | |
| movieUrl=movieUrl) | |
| except: | |
| return redirect(url_for('home')) | |
| app.run(host="0.0.0.0", port="7860", debug="true") |