"""Nexora hybrid data-collection demo.

Shows four ways an app can ingest data: manual CSV upload, a public
Hugging Face dataset, web scraping, and a free weather API.
"""

import os

import pandas as pd
import requests
import streamlit as st
from bs4 import BeautifulSoup
from datasets import load_dataset

# Must be the first Streamlit call in the script.
st.set_page_config(page_title="Nexora Hybrid Data Collector", layout="wide")

# NOTE(review): header emoji throughout this file were mojibake (UTF-8 bytes
# decoded as ISO-8859-7, e.g. "π§©"); restored to the intended characters.
st.title("🧩 Nexora Hybrid Data Collection Example")
st.write("This app shows how Nexora could collect data from multiple sources: Manual Upload, Public Datasets, Scraping, and APIs.")
| |
|
# --- Section 1: manual CSV upload ---------------------------------------
# Emoji restored from mojibake ("π" + control bytes); assumed 📁 — TODO confirm.
st.header("📁 1. Upload Your Own CSV Dataset")
uploaded_file = st.file_uploader("Choose a CSV file", type="csv")

if uploaded_file:
    try:
        user_data = pd.read_csv(uploaded_file)
    except Exception as exc:
        # Show a readable error in the UI instead of a raw traceback
        # when the upload is not parseable as CSV.
        st.error(f"Could not parse CSV: {exc}")
    else:
        st.success("CSV Uploaded Successfully!")
        st.dataframe(user_data)
| |
|
# --- Section 2: public dataset from the Hugging Face Hub ----------------
# Emoji restored from mojibake; assumed 🌐 — TODO confirm.
st.header("🌐 2. Pull Public Dataset (Hugging Face Example)")
if st.button("Load 'Wine Quality' Dataset from Hugging Face"):
    # NOTE(review): "wine-quality" has no owner namespace; on the current Hub
    # this id may not resolve — confirm the intended repo id. The try/except
    # keeps a bad id (or a network failure) from crashing the whole app.
    try:
        dataset = load_dataset("wine-quality")
    except Exception as exc:
        st.error(f"Failed to load dataset: {exc}")
    else:
        wine_data = pd.DataFrame(dataset["train"])
        st.success("Public Dataset Loaded!")
        st.dataframe(wine_data.head())
| |
|
# --- Section 3: scrape example quotes -----------------------------------
# Emoji restored from mojibake; assumed 🌐 — TODO confirm.
st.header("🌐 3. Scrape Data from Example Webpage (Quotes Example)")
scrape_url = "http://quotes.toscrape.com/"  # also reused by the cached-scraper demo below

# NOTE(review): this request runs on *every* Streamlit rerun (each widget
# interaction); consider moving it behind a button or @st.cache_data.
quotes = []
try:
    # timeout so a slow/unreachable host can never hang the whole app.
    response = requests.get(scrape_url, timeout=10)
    response.raise_for_status()
except requests.RequestException as exc:
    st.error(f"Scraping failed: {exc}")
else:
    soup = BeautifulSoup(response.text, "html.parser")
    quotes = [quote.get_text() for quote in soup.find_all("span", class_="text")]

if quotes:
    st.success("Scraped Example Quotes from Website:")
    st.write(quotes[:5])
| |
|
# --- Section 4: free weather API (Open-Meteo) ---------------------------
st.header("🌦️ 4. Pull Weather Data via Free API (Open-Meteo Example)")
latitude = st.number_input("Enter Latitude", value=40.71)    # default: New York City
longitude = st.number_input("Enter Longitude", value=-74.01)

if st.button("Get Weather Forecast"):
    weather_url = (
        "https://api.open-meteo.com/v1/forecast"
        f"?latitude={latitude}&longitude={longitude}"
        "&daily=temperature_2m_max&timezone=America/New_York"
    )
    try:
        # timeout so a network stall cannot freeze the UI.
        weather_response = requests.get(weather_url, timeout=10)
    except requests.RequestException as exc:
        st.error(f"Failed to retrieve weather data: {exc}")
    else:
        if weather_response.status_code == 200:
            weather_data = weather_response.json()
            st.success("Weather Forecast Data:")
            st.json(weather_data)
        else:
            st.error("Failed to retrieve weather data.")
| |
|
| | |
| | |
| | |
| | @st.cache_data |
| | def cached_scraper(url): |
| | response = requests.get(url) |
| | soup = BeautifulSoup(response.text, "html.parser") |
| | return [quote.get_text() for quote in soup.find_all("span", class_="text")] |
| |
|
# --- Cached-scraper demo (re-uses scrape_url from section 3) ------------
st.header("⚡ Cached Scraper Example")
if st.button("Scrape Again with Cache"):
    cached_quotes = cached_scraper(scrape_url)
    st.write(cached_quotes[:5])
| |
|
| | |
| | |
| | |
# Footer. Leading emoji restored from mojibake; assumed 🚀 — TODO confirm.
# "β" was a mangled em dash (UTF-8 E2 80 94 read as ISO-8859-7).
st.markdown("---")
st.write("🚀 Powered by Nexora Concept — Combining automation with your creativity.")
| |
|
| |
|