import streamlit as st
import boto3
import pandas as pd

# --- Configuration -----------------------------------------------------------
# AWS credentials and the target bucket are read from Streamlit secrets.
ACCESS_KEY = st.secrets["ACCESS_KEY"]
SECRET_KEY = st.secrets["SECRET_KEY"]
REGION_NAME = "eu-west-2"
BUCKET_NAME = st.secrets["BUCKET_NAME"]

# Per-folder upload codes; the user must enter the matching code to upload.
FOLDER_CODES = {
    "flutterwave": st.secrets["flutterwave_code"],
    "canvassing": st.secrets["canvassing_code"],
    # NOTE(review): "test" deliberately reuses the canvassing code in the
    # original — confirm this sharing is intentional.
    "test": st.secrets["canvassing_code"],
    # Add more folders and codes here...
}

# Subfolders offered per top-level folder; folders absent here have none.
SUBFOLDERS = {
    "flutterwave": ["teammate-performance", "product"],
    "canvassing": ["agent-performance-vicidial"],
}

# S3 client shared by the whole app.
s3_client = boto3.client(
    "s3",
    aws_access_key_id=ACCESS_KEY,
    aws_secret_access_key=SECRET_KEY,
    region_name=REGION_NAME,
)

# (folder, subfolder) -> read_csv keyword arguments used for previewing.
# A dispatch table replaces the original if/elif chain and makes adding new
# layouts a one-line change.
_PREVIEW_KWARGS = {
    ("canvassing", "agent-performance-vicidial"): {"header": 3},
    ("flutterwave", "teammate-performance"): {"header": 3},
    ("flutterwave", "product"): {"low_memory": False},
}


def preview_data(input_data, folder, subfolder, nrows=10):
    """Parse the first *nrows* rows of *input_data* (a CSV file-like object).

    The (folder, subfolder) pair selects the CSV layout (header row offset,
    parsing options). Returns a DataFrame, or None when the combination is
    not registered in ``_PREVIEW_KWARGS`` (matches the original behaviour of
    falling off the end of the if/elif chain).
    """
    kwargs = _PREVIEW_KWARGS.get((folder, subfolder))
    if kwargs is None:
        return None
    return pd.read_csv(input_data, nrows=nrows, **kwargs)


# Top-level "folders" are the first path component of every object key.
# .get("Contents", []) guards against an empty bucket, where list_objects_v2
# omits the "Contents" key entirely (the original raised KeyError).
folders = [
    obj["Key"].split("/")[0]
    for obj in s3_client.list_objects_v2(Bucket=BUCKET_NAME).get("Contents", [])
]

# --- Streamlit app -----------------------------------------------------------
st.title("Upload CSV File to S3 Bucket")

# Upload file
uploaded_file = st.file_uploader("Choose a CSV file", type=".csv")

# Reject anything the browser did not report as CSV.
if uploaded_file and uploaded_file.type != "text/csv":
    st.error("Only CSV files are allowed!")
    uploaded_file = None

# Show the rest of the app only if a valid CSV file is uploaded.
if uploaded_file:
    # Choose folder
    selected_folder = st.selectbox("Select Campaign", set(folders))

    # Offer a subfolder choice only when the folder defines subfolders.
    if selected_folder in SUBFOLDERS:
        selected_subfolder = st.selectbox("Select folder", SUBFOLDERS[selected_folder])
    else:
        selected_subfolder = None

    # Preview a few rows so the user can confirm before uploading.
    data = preview_data(uploaded_file, selected_folder, selected_subfolder)
    st.subheader("Data preview: Please Confirm the Data Before Proceeding")
    st.write(data)

    # Input unique code
    unique_code = st.text_input("Enter your unique code")

    # Upload button
    upload_button = st.button("Upload")

    if upload_button:
        # Validate unique code before touching S3.
        if FOLDER_CODES.get(selected_folder) != unique_code:
            st.error("Invalid unique code for selected folder")
        else:
            # Build the S3 object key: folder[/subfolder]/original-filename.
            if selected_subfolder:
                filename = f"{selected_folder}/{selected_subfolder}/{uploaded_file.name}"
            else:
                filename = f"{selected_folder}/{uploaded_file.name}"

            try:
                st.info("Please wait, upload is in progress...")
                # BUG FIX: the preview above already consumed part of the
                # uploaded stream; without rewinding, put_object uploads only
                # the remainder (possibly nothing). The original also re-read
                # the file with pd.read_csv into an unused variable, which is
                # removed — the raw file body is what gets uploaded.
                uploaded_file.seek(0)
                s3_client.put_object(Body=uploaded_file, Bucket=BUCKET_NAME, Key=filename)
                st.empty()
                st.success("File uploaded successfully!")
            except Exception as exc:
                # Narrowed from a bare `except:` that hid the failure behind a
                # typo'd message; surface the actual error to the user.
                st.error(f"An error occurred while uploading: {exc}")

            # Clear file input and unique code.
            uploaded_file = None
            unique_code = ""