File size: 4,402 Bytes
495be8b
 
 
 
 
5e0b390
 
495be8b
5e0b390
495be8b
 
 
5e0b390
 
 
495be8b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
import streamlit as st
import boto3
import pandas as pd

# Define AWS credentials and S3 resources.
# All sensitive values come from Streamlit's secrets store (.streamlit/secrets.toml
# locally, or the app's Secrets settings when deployed) so nothing is hard-coded.
ACCESS_KEY = st.secrets["ACCESS_KEY"]
SECRET_KEY = st.secrets["SECRET_KEY"]
REGION_NAME = "eu-west-2"
BUCKET_NAME = st.secrets["BUCKET_NAME"]

# Define unique codes for each folder.
# A user must type the matching code before a file is accepted for that folder.
FOLDER_CODES = {
    "flutterwave": st.secrets["flutterwave_code"],
    "canvassing": st.secrets["canvassing_code"],
    # NOTE(review): "test" reuses the canvassing code — looks like a
    # copy-paste; confirm whether "test" should have its own secret.
    "test": st.secrets["canvassing_code"],
    # Add more folders and codes here...
}

# Define subfolders for each folder.
# Folders absent from this mapping upload straight into the folder root.
SUBFOLDERS = {
    "flutterwave": ["teammate-performance", "product"],
    "canvassing": ["agent-performance-vicidial"],
}

# Connect to S3 client (module-level so every rerun of the script reuses it).
s3_client = boto3.client(
    "s3",
    aws_access_key_id=ACCESS_KEY,
    aws_secret_access_key=SECRET_KEY,
    region_name=REGION_NAME,
)

def preview_data(input_data, folder, subfolder):
    """Return a 10-row preview DataFrame for an uploaded CSV.

    Parameters
    ----------
    input_data : file-like
        Seekable buffer holding the CSV (e.g. a Streamlit UploadedFile).
    folder, subfolder : str or None
        Destination selection; determines how the CSV header is located.

    Returns
    -------
    pandas.DataFrame or None
        First 10 data rows, or ``None`` for an unknown folder/subfolder
        combination.

    The stream is rewound both before and after reading: before, so the
    preview starts at the top of the file regardless of prior reads;
    after, so downstream consumers (the full read and the S3 upload)
    see the complete file rather than a stream positioned mid-file.
    """
    input_data.seek(0)
    try:
        # These two exports carry 3 banner rows before the real header row.
        if (folder, subfolder) in (
            ("canvassing", "agent-performance-vicidial"),
            ("flutterwave", "teammate-performance"),
        ):
            return pd.read_csv(input_data, header=3, nrows=10)

        if (folder, subfolder) == ("flutterwave", "product"):
            return pd.read_csv(input_data, low_memory=False, nrows=10)

        # Unknown combination: no preview rules defined.
        return None
    finally:
        input_data.seek(0)


# Get a list of folders in the bucket (first path segment of each object key).
# NOTE(review): list_objects_v2 returns at most 1,000 keys per call and has no
# "Contents" key on an empty bucket — consider a paginator / .get("Contents", []).
folders = [obj["Key"].split("/")[0] for obj in s3_client.list_objects_v2(Bucket=BUCKET_NAME)["Contents"]]

# Streamlit app
st.title("Upload CSV File to S3 Bucket")

# Upload file
uploaded_file = st.file_uploader("Choose a CSV file", type=".csv")

# Reject anything the browser did not report as CSV.
if uploaded_file and uploaded_file.type != "text/csv":
    st.error("Only CSV files are allowed!")
    uploaded_file = None

# Show the rest of the app only if a valid CSV file is uploaded
if uploaded_file:

    # Choose folder (set() deduplicates: every object key contributed an entry)
    selected_folder = st.selectbox("Select Campaign", set(folders))

    # Check if folder has subfolders
    if selected_folder in SUBFOLDERS:
        # Show subfolder selection
        selected_subfolder = st.selectbox("Select folder", SUBFOLDERS[selected_folder])
    else:
        selected_subfolder = None

    # Read a few rows so the user can eyeball the data before committing.
    data = preview_data(uploaded_file, selected_folder, selected_subfolder)

    # Show data preview
    st.subheader("Data preview: Please Confirm the Data Before Proceeding")
    st.write(data)

    # Input unique code
    unique_code = st.text_input("Enter your unique code")

    # Upload button
    upload_button = st.button("Upload")

    if upload_button:

        # Validate unique code for the selected folder.
        if FOLDER_CODES.get(selected_folder) != unique_code:
            st.error("Invalid unique code for selected folder")
        else:
            # Full parse as a sanity check that the whole file is valid CSV.
            # BUG FIX: the preview (or any earlier read) may leave the stream
            # positioned mid-file; without seek(0), read_csv sees a truncated
            # file here.
            uploaded_file.seek(0)
            data = pd.read_csv(uploaded_file)

            # Create filename based on folder and subfolder
            if selected_subfolder:
                filename = f"{selected_folder}/{selected_subfolder}/{uploaded_file.name}"
            else:
                filename = f"{selected_folder}/{uploaded_file.name}"

            try:
                # Upload notification
                st.info("Please wait, upload is in progress...")

                # BUG FIX: read_csv above consumed the stream, and put_object
                # uploads from the current position — rewind or the object
                # lands in S3 empty.
                uploaded_file.seek(0)
                s3_client.put_object(Body=uploaded_file, Bucket=BUCKET_NAME, Key=filename)
                st.empty()
                st.success("File uploaded successfully!")

            # Narrowed from a bare `except:` (which also caught SystemExit /
            # KeyboardInterrupt) and surface the actual cause to the user.
            except Exception as exc:
                st.error(f"An error occurred while uploading: {exc}")

            # Clear file input and unique code
            uploaded_file = None
            unique_code = ""

# # Choose folder
# selected_folder = st.selectbox("Select folder", folders)
#
# # Input unique code
# unique_code = st.text_input("Enter unique code")
#
# # Upload button
# upload_button = st.button("Upload")
#
# if upload_button:
#     # Validate unique code
#     if FOLDER_CODES.get(selected_folder) != unique_code:
#         st.error("Invalid unique code for selected folder")
#     else:
#         # Read uploaded file
#         data = pd.read_csv(uploaded_file)
#
#         # Create filename
#         filename = f"{selected_folder}/{uploaded_file.name}"
#
#         # Upload file to S3
#         s3_client.put_object(Body=data.to_csv(), Bucket=BUCKET_NAME, Key=filename)
#
#         st.success("File uploaded successfully!")
#
#         # Clear file input and unique code
#         uploaded_file = None
#         unique_code = ""