Spaces:
Sleeping
Sleeping
import streamlit as st
import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
import time
import random

# --- Page Config ---
# Must be the first Streamlit call in the script.
st.set_page_config(page_title="Unlimited OTP Cloud", page_icon="βοΈ", layout="wide")

# --- Custom Styling ---
# Inline CSS: full-width buttons, a success banner, and the SMS "card" look.
st.markdown("""
<style>
.stButton>button { width: 100%; border-radius: 8px; font-weight: bold; }
.success-msg { color: #0f5132; background-color: #d1e7dd; padding: 10px; border-radius: 5px; }
.sms-card { background-color: #f0f2f6; padding: 15px; border-radius: 10px; margin-bottom: 10px; border-left: 5px solid #ff4b4b; }
</style>
""", unsafe_allow_html=True)

st.title("π Universal SMS Aggregator")
st.caption("Powered by Hugging Face | Methods: Shelex-Style Scraping")

# --- Sidebar: Source Selector ---
st.sidebar.header("βοΈ Configuration")
source = st.sidebar.radio(
    "Select Number Source:",
    ("Server 1 (AnonymSMS)", "Server 2 (Receive-SMS-Free)", "Server 3 (7Sim - Mixed)")
)

# Shared user-agent rotator for every outbound scrape request.
ua = UserAgent()
# --- Functions for Different Sources ---
def get_headers():
    """Build request headers with a freshly randomized User-Agent string."""
    headers = {'User-Agent': ua.random}
    return headers
# SOURCE 1: AnonymSMS
def scrape_anonymsms_numbers():
    """Scrape the AnonymSMS homepage for its publicly listed numbers.

    Returns:
        list[dict]: one entry per number with keys ``num`` (display text),
        ``link`` (inbox URL as found in the card's anchor) and ``origin``
        (constant ``"AnonymSMS"``). Returns ``[]`` on any network failure
        or if the page layout has changed.
    """
    url = "https://anonymsms.com/"
    try:
        resp = requests.get(url, headers=get_headers(), timeout=10)
        soup = BeautifulSoup(resp.text, 'html.parser')
        cards = soup.find_all('div', class_='number-card-content')
        data = []
        for card in cards:
            # Guard each card individually: previously a single card missing
            # its title div or anchor raised inside the loop and the except
            # clause discarded every number already collected.
            title = card.find('div', class_='number-card-title')
            anchor = card.find('a')
            if title is None or anchor is None or not anchor.get('href'):
                continue
            data.append({
                "num": title.text.strip(),
                "link": anchor['href'],
                "origin": "AnonymSMS",
            })
        return data
    except Exception:
        # Best-effort scraper: the caller treats [] as "source unavailable".
        return []
def scrape_anonymsms_msgs(link):
    """Fetch one AnonymSMS inbox page and parse its message table.

    Args:
        link: absolute URL of the number's inbox page.

    Returns:
        list[dict]: dicts with keys ``from`` (sender, first cell) and
        ``msg`` (message text, second cell). ``[]`` on any failure.
    """
    try:
        resp = requests.get(link, headers=get_headers(), timeout=10)
        soup = BeautifulSoup(resp.text, 'html.parser')
        rows = soup.find_all('tr')
        msgs = []
        for row in rows:
            cols = row.find_all('td')
            # Header rows / spacer rows have fewer than two cells; skip them.
            if len(cols) >= 2:
                sender = cols[0].text.strip()
                text = cols[1].text.strip()
                msgs.append({"from": sender, "msg": text})
        return msgs
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behavior
        # without trapping interpreter-exit signals.
        return []
# SOURCE 2: Receive-SMS-Free.cc
def scrape_rsf_numbers():
    """Scrape receive-sms-free.cc's homepage for directly linked numbers.

    Only anchors whose href contains ``"Phone-Number"`` are kept (the site
    also lists country pages we don't want). Relative hrefs are resolved
    against the site root.

    Returns:
        list[dict]: at most 20 entries with keys ``num``, ``link`` and
        ``origin`` (constant ``"RecvSMSFree"``). ``[]`` on any failure.
    """
    base_url = "https://receive-sms-free.cc"
    try:
        resp = requests.get(base_url, headers=get_headers(), timeout=10)
        soup = BeautifulSoup(resp.text, 'html.parser')
        # Typical homepage structure: numbers inside <li class="span3"> items.
        elements = soup.select('li.span3 a')
        data = []
        for el in elements:
            # el['href'] raised KeyError for href-less anchors and the
            # except clause then discarded everything collected so far;
            # .get() lets us skip just the bad element.
            href = el.get('href')
            if not href or "Phone-Number" not in href:
                continue
            text = el.text.strip()
            full_link = base_url + href if href.startswith("/") else href
            data.append({"num": text, "link": full_link, "origin": "RecvSMSFree"})
        return data[:20]  # Return top 20
    except Exception:
        # Best-effort scraper: the caller treats [] as "source unavailable".
        return []
def scrape_rsf_msgs(link):
    """Fetch one receive-sms-free.cc inbox page and heuristically parse it.

    The site has shipped several layouts, so two selectors are tried in
    order. Rows are only kept when their text contains "min" or "sec"
    (the relative timestamp), which filters out headers and ads.

    Args:
        link: absolute URL of the number's inbox page.

    Returns:
        list[dict]: dicts with keys ``from`` (first non-empty line of the
        row) and ``msg`` (last non-empty line). ``[]`` on any failure.
    """
    try:
        resp = requests.get(link, headers=get_headers(), timeout=10)
        soup = BeautifulSoup(resp.text, 'html.parser')
        msgs = []
        # Primary layout, then the alternative "c-message" layout.
        rows = soup.select('.row.border-bottom')
        if not rows:
            rows = soup.find_all('div', class_='c-message')
        for row in rows:
            text_div = row.text.strip()
            # Heuristic: real message rows carry a relative timestamp.
            if "min" in text_div or "sec" in text_div:
                lines = [line for line in text_div.split('\n') if line.strip()]
                if len(lines) >= 2:
                    msgs.append({"from": lines[0], "msg": lines[-1]})
        return msgs
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / interpreter shutdown
        # are no longer silently swallowed.
        return []
# --- Main Logic ---
if st.sidebar.button("π Refresh Servers"):
    st.rerun()

st.subheader(f"π‘ Connected to: {source}")

# Dispatch to the scraper matching the selected source. Server 3 has no
# scraper yet and transparently falls back to Server 1.
numbers = []
if "Server 1" in source:
    with st.spinner("Fetching from AnonymSMS..."):
        numbers = scrape_anonymsms_numbers()
elif "Server 2" in source:
    with st.spinner("Fetching from Receive-SMS-Free..."):
        numbers = scrape_rsf_numbers()
elif "Server 3" in source:
    st.info("Server 3 is currently under maintenance. Using Server 1 backup.")
    numbers = scrape_anonymsms_numbers()

if not numbers:
    st.error("Numbers fetch nahi huye. Site block ho sakti hai ya down hai. Sidebar se dusra Server try karein.")
else:
    st.success(f"Found {len(numbers)} Active Numbers")

    # Dropdown instead of a grid for a cleaner mobile view.
    options = [f"{n['origin']} - {n['num']}" for n in numbers]
    selected_option = st.selectbox("Select a Number:", options)

    # Map the label back to its record (labels align 1:1 with `numbers`).
    idx = options.index(selected_option)
    selected_data = numbers[idx]

    st.markdown("---")
    col1, col2 = st.columns([3, 1])
    with col1:
        st.markdown(f"### π± Inbox: `{selected_data['num']}`")
    with col2:
        # The button's only job is to trigger a script rerun; the inbox is
        # re-fetched unconditionally below. (The old guard
        # `if refresh_msgs or selected_option:` was always true, since a
        # selectbox over non-empty options always returns a truthy label.)
        st.button("π© Refresh Inbox")

    messages = []
    with st.spinner("Decrypting Messages..."):
        if "AnonymSMS" in selected_data['origin']:
            messages = scrape_anonymsms_msgs(selected_data['link'])
        elif "RecvSMSFree" in selected_data['origin']:
            messages = scrape_rsf_msgs(selected_data['link'])

    if messages:
        # Show at most the 10 newest messages as styled cards.
        for m in messages[:10]:
            st.markdown(f"""
            <div class="sms-card">
                <b>From:</b> {m['from']}<br>
                <code style="background:white; display:block; padding:5px; margin-top:5px;">{m['msg']}</code>
            </div>
            """, unsafe_allow_html=True)
    else:
        st.warning("π No messages found yet. Click 'Refresh Inbox'.")

st.markdown("---")
st.markdown("β οΈ *Disclaimer: All numbers are public. Do not use for bank OTPs.*")