Adding inventory units
Browse files- apps/database_page.py +16 -23
- queries/process_all_db.py +4 -2
- queries/process_invunit.py +93 -0
- utils/check_sheet_exist.py +3 -0
- utils/utils_vars.py +1 -0
apps/database_page.py
CHANGED
|
@@ -7,6 +7,7 @@ from st_aggrid import AgGrid, ColumnsAutoSizeMode
|
|
| 7 |
from apps.dump_analysis import dump_analysis_space
|
| 8 |
from queries.process_all_db import process_all_tech_db, process_all_tech_db_with_stats
|
| 9 |
from queries.process_gsm import process_gsm_data_to_excel
|
|
|
|
| 10 |
from queries.process_lte import process_lte_data_to_excel
|
| 11 |
|
| 12 |
# from queries.process_mal import process_mal_data_to_excel
|
|
@@ -51,15 +52,12 @@ def download_button(database_type):
|
|
| 51 |
elif database_type == "NEI":
|
| 52 |
data = UtilsVars.neighbors_database
|
| 53 |
file_name = f"Neighbors databases_{datetime.now()}.xlsx"
|
| 54 |
-
# elif database_type == "TRX":
|
| 55 |
-
# data = UtilsVars.final_trx_database
|
| 56 |
-
# file_name = f"TRX database_{datetime.now()}.xlsx"
|
| 57 |
elif database_type == "MRBTS":
|
| 58 |
data = UtilsVars.final_mrbts_database
|
| 59 |
file_name = f"MRBTS database_{datetime.now()}.xlsx"
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
st.download_button(
|
| 64 |
type="primary",
|
| 65 |
label=f"Download {database_type} Database File",
|
|
@@ -111,6 +109,7 @@ if uploaded_file is not None:
|
|
| 111 |
and Technology.neighbors == False
|
| 112 |
and Technology.trx == False
|
| 113 |
and Technology.mrbts == False
|
|
|
|
| 114 |
):
|
| 115 |
st.error(
|
| 116 |
"""
|
|
@@ -121,7 +120,7 @@ if uploaded_file is not None:
|
|
| 121 |
"neighbors": ["ADCE", "ADJS", "ADJI", "ADJG", "ADJW", "BTS", "WCEL"],
|
| 122 |
"trx": ["TRX", "BTS"],
|
| 123 |
"mrbts": ["MRBTS"],
|
| 124 |
-
"
|
| 125 |
"""
|
| 126 |
)
|
| 127 |
|
|
@@ -131,7 +130,7 @@ if uploaded_file is not None:
|
|
| 131 |
and Technology.lte == True
|
| 132 |
and Technology.trx == True
|
| 133 |
and Technology.mrbts == True
|
| 134 |
-
and Technology.
|
| 135 |
):
|
| 136 |
DumpType.full_dump = True
|
| 137 |
with col1:
|
|
@@ -159,14 +158,6 @@ if uploaded_file is not None:
|
|
| 159 |
"Generate LTE DB",
|
| 160 |
on_click=lambda: process_database(process_lte_data_to_excel, "LTE"),
|
| 161 |
)
|
| 162 |
-
# if Technology.trx == True:
|
| 163 |
-
# with col5:
|
| 164 |
-
# st.button(
|
| 165 |
-
# "Generate TRX DB",
|
| 166 |
-
# on_click=lambda: process_database(
|
| 167 |
-
# process_trx_with_bts_name_data_to_excel, "TRX"
|
| 168 |
-
# ),
|
| 169 |
-
# )
|
| 170 |
if Technology.mrbts == True:
|
| 171 |
with col5:
|
| 172 |
st.button(
|
|
@@ -175,15 +166,17 @@ if uploaded_file is not None:
|
|
| 175 |
process_mrbts_data_to_excel, "MRBTS"
|
| 176 |
),
|
| 177 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 178 |
|
| 179 |
-
# if Technology.mal == True:
|
| 180 |
-
# with col7:
|
| 181 |
-
# st.button(
|
| 182 |
-
# "Generate MAL",
|
| 183 |
-
# on_click=lambda: process_database(process_mal_data_to_excel, "MAL"),
|
| 184 |
-
# )
|
| 185 |
if Technology.neighbors == True:
|
| 186 |
-
with
|
| 187 |
st.button(
|
| 188 |
"Generate NEI DB",
|
| 189 |
on_click=lambda: process_database(
|
|
|
|
| 7 |
from apps.dump_analysis import dump_analysis_space
|
| 8 |
from queries.process_all_db import process_all_tech_db, process_all_tech_db_with_stats
|
| 9 |
from queries.process_gsm import process_gsm_data_to_excel
|
| 10 |
+
from queries.process_invunit import process_invunit_data_to_excel
|
| 11 |
from queries.process_lte import process_lte_data_to_excel
|
| 12 |
|
| 13 |
# from queries.process_mal import process_mal_data_to_excel
|
|
|
|
| 52 |
elif database_type == "NEI":
|
| 53 |
data = UtilsVars.neighbors_database
|
| 54 |
file_name = f"Neighbors databases_{datetime.now()}.xlsx"
|
|
|
|
|
|
|
|
|
|
| 55 |
elif database_type == "MRBTS":
|
| 56 |
data = UtilsVars.final_mrbts_database
|
| 57 |
file_name = f"MRBTS database_{datetime.now()}.xlsx"
|
| 58 |
+
elif database_type == "INVUNIT":
|
| 59 |
+
data = UtilsVars.final_invunit_database
|
| 60 |
+
file_name = f"INVUNIT database_{datetime.now()}.xlsx"
|
| 61 |
st.download_button(
|
| 62 |
type="primary",
|
| 63 |
label=f"Download {database_type} Database File",
|
|
|
|
| 109 |
and Technology.neighbors == False
|
| 110 |
and Technology.trx == False
|
| 111 |
and Technology.mrbts == False
|
| 112 |
+
and Technology.invunit == False
|
| 113 |
):
|
| 114 |
st.error(
|
| 115 |
"""
|
|
|
|
| 120 |
"neighbors": ["ADCE", "ADJS", "ADJI", "ADJG", "ADJW", "BTS", "WCEL"],
|
| 121 |
"trx": ["TRX", "BTS"],
|
| 122 |
"mrbts": ["MRBTS"],
|
| 123 |
+
"invunit": ["INVUNIT"]
|
| 124 |
"""
|
| 125 |
)
|
| 126 |
|
|
|
|
| 130 |
and Technology.lte == True
|
| 131 |
and Technology.trx == True
|
| 132 |
and Technology.mrbts == True
|
| 133 |
+
and Technology.invunit == True
|
| 134 |
):
|
| 135 |
DumpType.full_dump = True
|
| 136 |
with col1:
|
|
|
|
| 158 |
"Generate LTE DB",
|
| 159 |
on_click=lambda: process_database(process_lte_data_to_excel, "LTE"),
|
| 160 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 161 |
if Technology.mrbts == True:
|
| 162 |
with col5:
|
| 163 |
st.button(
|
|
|
|
| 166 |
process_mrbts_data_to_excel, "MRBTS"
|
| 167 |
),
|
| 168 |
)
|
| 169 |
+
if Technology.invunit == True:
|
| 170 |
+
with col6:
|
| 171 |
+
st.button(
|
| 172 |
+
"Generate INVUNIT",
|
| 173 |
+
on_click=lambda: process_database(
|
| 174 |
+
process_invunit_data_to_excel, "INVUNIT"
|
| 175 |
+
),
|
| 176 |
+
)
|
| 177 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 178 |
if Technology.neighbors == True:
|
| 179 |
+
with col7:
|
| 180 |
st.button(
|
| 181 |
"Generate NEI DB",
|
| 182 |
on_click=lambda: process_database(
|
queries/process_all_db.py
CHANGED
|
@@ -1,4 +1,5 @@
|
|
| 1 |
from queries.process_gsm import combined_gsm_database, gsm_analaysis
|
|
|
|
| 2 |
from queries.process_lte import lte_fdd_analaysis, lte_tdd_analaysis, process_lte_data
|
| 3 |
from queries.process_mrbts import process_mrbts_data
|
| 4 |
from queries.process_wcdma import process_wcdma_data, wcdma_analaysis
|
|
@@ -16,6 +17,7 @@ def all_dbs(filepath: str):
|
|
| 16 |
process_wcdma_data(filepath)
|
| 17 |
process_lte_data(filepath),
|
| 18 |
process_mrbts_data(filepath)
|
|
|
|
| 19 |
|
| 20 |
|
| 21 |
def process_all_tech_db(filepath: str):
|
|
@@ -23,7 +25,7 @@ def process_all_tech_db(filepath: str):
|
|
| 23 |
|
| 24 |
UtilsVars.final_all_database = convert_dfs(
|
| 25 |
UtilsVars.all_db_dfs,
|
| 26 |
-
["GSM", "MAL", "TRX", "WCDMA", "LTE_FDD", "LTE_TDD", "MRBTS"],
|
| 27 |
)
|
| 28 |
|
| 29 |
|
|
@@ -41,5 +43,5 @@ def process_all_tech_db_with_stats(
|
|
| 41 |
lte_tdd_analaysis(filepath)
|
| 42 |
UtilsVars.final_all_database = convert_dfs(
|
| 43 |
UtilsVars.all_db_dfs,
|
| 44 |
-
["GSM", "MAL", "TRX", "WCDMA", "LTE_FDD", "LTE_TDD", "MRBTS"],
|
| 45 |
)
|
|
|
|
| 1 |
from queries.process_gsm import combined_gsm_database, gsm_analaysis
|
| 2 |
+
from queries.process_invunit import process_invunit_data
|
| 3 |
from queries.process_lte import lte_fdd_analaysis, lte_tdd_analaysis, process_lte_data
|
| 4 |
from queries.process_mrbts import process_mrbts_data
|
| 5 |
from queries.process_wcdma import process_wcdma_data, wcdma_analaysis
|
|
|
|
| 17 |
process_wcdma_data(filepath)
|
| 18 |
process_lte_data(filepath),
|
| 19 |
process_mrbts_data(filepath)
|
| 20 |
+
process_invunit_data(filepath)
|
| 21 |
|
| 22 |
|
| 23 |
def process_all_tech_db(filepath: str):
|
|
|
|
| 25 |
|
| 26 |
UtilsVars.final_all_database = convert_dfs(
|
| 27 |
UtilsVars.all_db_dfs,
|
| 28 |
+
["GSM", "MAL", "TRX", "WCDMA", "LTE_FDD", "LTE_TDD", "MRBTS", "INVUNIT"],
|
| 29 |
)
|
| 30 |
|
| 31 |
|
|
|
|
| 43 |
lte_tdd_analaysis(filepath)
|
| 44 |
UtilsVars.final_all_database = convert_dfs(
|
| 45 |
UtilsVars.all_db_dfs,
|
| 46 |
+
["GSM", "MAL", "TRX", "WCDMA", "LTE_FDD", "LTE_TDD", "MRBTS", "INVUNIT"],
|
| 47 |
)
|
queries/process_invunit.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
|
| 3 |
+
from utils.convert_to_excel import convert_dfs, save_dataframe
|
| 4 |
+
from utils.extract_code import extract_code_from_mrbts
|
| 5 |
+
from utils.utils_vars import UtilsVars
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def create_invunit_summary(df: pd.DataFrame) -> pd.DataFrame:
    """
    Append an "invunit_summary" column to *df*.

    For each row, every non-NaN count in the columns after the first one
    (the first column is MRBTS) is rendered as "<count> <column name>" and
    the pieces are joined with "/". Rows with no counts get an empty string.

    Args:
        df (pd.DataFrame): Pivoted per-MRBTS unit counts; mutated in place.

    Returns:
        pd.DataFrame: The same DataFrame, with "invunit_summary" added.
    """
    unit_cols = df.columns[1:]  # everything except the leading MRBTS column

    def summarize(row) -> str:
        parts = [f"{int(row[col])} {col}" for col in unit_cols if pd.notna(row[col])]
        return "/".join(parts)

    df["invunit_summary"] = df.apply(summarize, axis=1)
    return df
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def process_invunit_data(file_path: str) -> pd.DataFrame:
    """
    Build the INVUNIT database from the dump workbook at *file_path*.

    Reads the "INVUNIT" sheet, counts inventory units of each type per
    MRBTS, shortens the verbose unit-type names, and produces one row per
    MRBTS with a "/"-joined summary string. The result is also registered
    in UtilsVars for the combined all-technologies export.

    Args:
        file_path (str): The path to the dump Excel file.

    Returns:
        pd.DataFrame: Columns ["MRBTS", "CODE", "invunit_summary"].
    """
    dfs = pd.read_excel(
        file_path,
        sheet_name=["INVUNIT"],
        engine="calamine",
        skiprows=[0],  # first workbook row is a banner, not the header
    )
    # Parse INVUNIT
    df_invunit = dfs["INVUNIT"]
    # Strip spaces from the sheet's column headers so they can be addressed reliably.
    df_invunit.columns = df_invunit.columns.str.replace(r"[ ]", "", regex=True)

    # Keep only data rows: real MRBTS ids are numeric, header/junk rows are not.
    df_invunit = df_invunit[df_invunit["MRBTS"].apply(lambda x: str(x).isnumeric())]
    # NOTE(fix): the original assigned df_invunit["CODE"] here and then immediately
    # dropped it with the column selection below; CODE is (re)computed after the
    # pivot anyway, so the dead assignment is removed.
    df_invunit = df_invunit[["MRBTS", "inventoryUnitType"]]

    # Pivot to one row per MRBTS with one count column per unit type;
    # fill_value=None leaves absent types as NaN so the summary skips them.
    df_invunit = (
        df_invunit.groupby(["MRBTS", "inventoryUnitType"])
        .size()
        .unstack(fill_value=None)
        .reset_index()
    )
    # Shorten the verbose unit-type names to their short codes.
    df_invunit = df_invunit.rename(
        columns={
            "ABIA AirScale Capacity": "ABIA",
            "AMIA AirScale Indoor Subrack": "AMIA",
            "AMOB AirScale Outdoor Subrack": "AMOB",
            "ASIA AirScale Common": "ASIA",
            "ASIB AirScale Common": "ASIB",
            "BB Extension Outdoor Sub-Module FBBA": "FBBA",
            "CORE_ASIA AirScale Common": "CORE_ASIA",
            "CORE_ASIB AirScale Common": "CORE_ASIB",
            "CORE_Flexi System Module Outdoor FSMF": "CORE_FSMF",
            "CORE_SMOD": "CORE_SMOD",
            "Flexi Baseband Sub-Module FBBC": "FBBC",
            "Flexi System Module Outdoor FSMF": "FSMF",
            "Not available": "NOT_AVAILABLE",
            "SingleAntennaDevice": "SAD",
        }
    )
    df_invunit = create_invunit_summary(df_invunit)
    df_invunit["CODE"] = df_invunit["MRBTS"].apply(extract_code_from_mrbts)
    df_invunit = df_invunit[["MRBTS", "CODE", "invunit_summary"]]

    # Register for the "all technologies" combined database export.
    UtilsVars.all_db_dfs.append(df_invunit)
    UtilsVars.all_db_dfs_names.append("INVUNIT")
    return df_invunit
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def process_invunit_data_to_excel(file_path: str) -> None:
    """
    Process the INVUNIT data from *file_path* and stage it as an Excel payload.

    The converted workbook bytes are stored on
    UtilsVars.final_invunit_database for the download button to serve.

    Args:
        file_path (str): The path to the dump file.
    """
    result = process_invunit_data(file_path)
    UtilsVars.final_invunit_database = convert_dfs([result], ["INVUNIT"])
|
utils/check_sheet_exist.py
CHANGED
|
@@ -13,6 +13,7 @@ class Technology:
|
|
| 13 |
trx = False
|
| 14 |
mrbts = False
|
| 15 |
mal = False
|
|
|
|
| 16 |
|
| 17 |
|
| 18 |
# Dictionary of sheet groups to check
|
|
@@ -24,6 +25,7 @@ sheets_to_check = {
|
|
| 24 |
"trx": ["TRX", "BTS"],
|
| 25 |
"mrbts": ["MRBTS"],
|
| 26 |
"mal": ["MAL", "BTS"],
|
|
|
|
| 27 |
}
|
| 28 |
|
| 29 |
|
|
@@ -76,6 +78,7 @@ def execute_checks_sheets_exist(file_path):
|
|
| 76 |
Technology.neighbors = False
|
| 77 |
Technology.trx = False
|
| 78 |
Technology.mrbts = False
|
|
|
|
| 79 |
Technology.mal = False
|
| 80 |
DumpType.full_dump = False
|
| 81 |
for tech_attr, sheets in sheets_to_check.items():
|
|
|
|
| 13 |
trx = False
|
| 14 |
mrbts = False
|
| 15 |
mal = False
|
| 16 |
+
invunit = False
|
| 17 |
|
| 18 |
|
| 19 |
# Dictionary of sheet groups to check
|
|
|
|
| 25 |
"trx": ["TRX", "BTS"],
|
| 26 |
"mrbts": ["MRBTS"],
|
| 27 |
"mal": ["MAL", "BTS"],
|
| 28 |
+
"invunit": ["INVUNIT"],
|
| 29 |
}
|
| 30 |
|
| 31 |
|
|
|
|
| 78 |
Technology.neighbors = False
|
| 79 |
Technology.trx = False
|
| 80 |
Technology.mrbts = False
|
| 81 |
+
Technology.invunit = False
|
| 82 |
Technology.mal = False
|
| 83 |
DumpType.full_dump = False
|
| 84 |
for tech_attr, sheets in sheets_to_check.items():
|
utils/utils_vars.py
CHANGED
|
@@ -52,6 +52,7 @@ class UtilsVars:
|
|
| 52 |
final_wcdma_database = ""
|
| 53 |
final_trx_database = ""
|
| 54 |
final_mrbts_database = ""
|
|
|
|
| 55 |
final_mal_database = ""
|
| 56 |
gsm_dfs = []
|
| 57 |
wcdma_dfs = []
|
|
|
|
| 52 |
final_wcdma_database = ""
|
| 53 |
final_trx_database = ""
|
| 54 |
final_mrbts_database = ""
|
| 55 |
+
final_invunit_database = ""
|
| 56 |
final_mal_database = ""
|
| 57 |
gsm_dfs = []
|
| 58 |
wcdma_dfs = []
|