Add CIQ 3G Generator with WBTS/WCEL sheet generation, refactor CIQ 2G to extract shared site parsing logic with MCC/MNC parameters, implement TRX sheet builder with BCCH/TRX frequency parsing and MAIO assignment, add BTS sheet builder with template name detection and sector ID mapping, and create MAL sheet builder with mobile allocation frequency extraction from CIQ brut Excel
Browse files- app.py +1 -0
- apps/ciq_2g_generator.py +9 -1
- apps/ciq_3g_generator.py +56 -0
- queries/process_ciq_2g.py +436 -47
- queries/process_ciq_3g.py +276 -0
app.py
CHANGED
|
@@ -119,6 +119,7 @@ if check_password():
|
|
| 119 |
"apps/parameters_distribution.py", title="📊Parameters distribution"
|
| 120 |
),
|
| 121 |
st.Page("apps/ciq_2g_generator.py", title="🧾 CIQ 2G Generator"),
|
|
|
|
| 122 |
st.Page("apps/core_dump_page.py", title="📠Parse dump core"),
|
| 123 |
st.Page("apps/gps_converter.py", title="🧭GPS Converter"),
|
| 124 |
st.Page("apps/distance.py", title="🛰Distance Calculator"),
|
|
|
|
| 119 |
"apps/parameters_distribution.py", title="📊Parameters distribution"
|
| 120 |
),
|
| 121 |
st.Page("apps/ciq_2g_generator.py", title="🧾 CIQ 2G Generator"),
|
| 122 |
+
st.Page("apps/ciq_3g_generator.py", title="🧾 CIQ 3G Generator"),
|
| 123 |
st.Page("apps/core_dump_page.py", title="📠Parse dump core"),
|
| 124 |
st.Page("apps/gps_converter.py", title="🧭GPS Converter"),
|
| 125 |
st.Page("apps/distance.py", title="🛰Distance Calculator"),
|
apps/ciq_2g_generator.py
CHANGED
|
@@ -13,6 +13,12 @@ with col2:
|
|
| 13 |
"Upload CIQ brut 2G (Excel)", type=["xlsx", "xls"], key="ciq2g_ciq"
|
| 14 |
)
|
| 15 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
if dump_file is None or ciq_file is None:
|
| 17 |
st.info("Upload dump xlsb + CIQ brut Excel to generate CIQ 2G.")
|
| 18 |
st.stop()
|
|
@@ -20,7 +26,9 @@ if dump_file is None or ciq_file is None:
|
|
| 20 |
if st.button("Generate", type="primary"):
|
| 21 |
try:
|
| 22 |
with st.spinner("Generating CIQ 2G... (dump is heavy)"):
|
| 23 |
-
sheets, excel_bytes = generate_ciq_2g_excel(
|
|
|
|
|
|
|
| 24 |
st.session_state["ciq2g_sheets"] = sheets
|
| 25 |
st.session_state["ciq2g_excel_bytes"] = excel_bytes
|
| 26 |
st.success("CIQ 2G generated")
|
|
|
|
| 13 |
"Upload CIQ brut 2G (Excel)", type=["xlsx", "xls"], key="ciq2g_ciq"
|
| 14 |
)
|
| 15 |
|
| 16 |
+
col3, col4 = st.columns(2)
|
| 17 |
+
with col3:
|
| 18 |
+
mcc = st.number_input("MCC", value=610, step=1, min_value=0, key="ciq2g_mcc")
|
| 19 |
+
with col4:
|
| 20 |
+
mnc = st.number_input("MNC", value=2, step=1, min_value=0, key="ciq2g_mnc")
|
| 21 |
+
|
| 22 |
if dump_file is None or ciq_file is None:
|
| 23 |
st.info("Upload dump xlsb + CIQ brut Excel to generate CIQ 2G.")
|
| 24 |
st.stop()
|
|
|
|
| 26 |
if st.button("Generate", type="primary"):
|
| 27 |
try:
|
| 28 |
with st.spinner("Generating CIQ 2G... (dump is heavy)"):
|
| 29 |
+
sheets, excel_bytes = generate_ciq_2g_excel(
|
| 30 |
+
dump_file, ciq_file, mcc=int(mcc), mnc=int(mnc)
|
| 31 |
+
)
|
| 32 |
st.session_state["ciq2g_sheets"] = sheets
|
| 33 |
st.session_state["ciq2g_excel_bytes"] = excel_bytes
|
| 34 |
st.success("CIQ 2G generated")
|
apps/ciq_3g_generator.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import streamlit as st
|
| 3 |
+
|
| 4 |
+
from queries.process_ciq_3g import generate_ciq_3g_excel
|
| 5 |
+
|
| 6 |
+
st.title("CIQ 3G Generator")
|
| 7 |
+
|
| 8 |
+
ciq_file = st.file_uploader(
|
| 9 |
+
"Upload CIQ brut 3G (Excel)", type=["xlsx", "xls"], key="ciq3g_ciq"
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
col1, col2 = st.columns(2)
|
| 13 |
+
with col1:
|
| 14 |
+
year_suffix = st.text_input("Year suffix", value="25", key="ciq3g_year")
|
| 15 |
+
with col2:
|
| 16 |
+
bands = st.text_input(
|
| 17 |
+
"Bands string",
|
| 18 |
+
value="G9G18U9U21L8L18L26",
|
| 19 |
+
key="ciq3g_bands",
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
if ciq_file is None:
|
| 23 |
+
st.info("Upload CIQ brut 3G Excel to generate CIQ 3G (WBTS + WCEL).")
|
| 24 |
+
st.stop()
|
| 25 |
+
|
| 26 |
+
if st.button("Generate", type="primary"):
|
| 27 |
+
try:
|
| 28 |
+
with st.spinner("Generating CIQ 3G..."):
|
| 29 |
+
sheets, excel_bytes = generate_ciq_3g_excel(
|
| 30 |
+
ciq_file, year_suffix=year_suffix.strip(), bands=bands.strip()
|
| 31 |
+
)
|
| 32 |
+
st.session_state["ciq3g_sheets"] = sheets
|
| 33 |
+
st.session_state["ciq3g_excel_bytes"] = excel_bytes
|
| 34 |
+
st.success("CIQ 3G generated")
|
| 35 |
+
except Exception as e:
|
| 36 |
+
st.error(f"Error: {e}")
|
| 37 |
+
|
| 38 |
+
sheets = st.session_state.get("ciq3g_sheets")
|
| 39 |
+
excel_bytes = st.session_state.get("ciq3g_excel_bytes")
|
| 40 |
+
|
| 41 |
+
if sheets:
|
| 42 |
+
tab_names = list(sheets.keys())
|
| 43 |
+
tabs = st.tabs(tab_names)
|
| 44 |
+
for t, name in zip(tabs, tab_names):
|
| 45 |
+
with t:
|
| 46 |
+
df: pd.DataFrame = sheets[name]
|
| 47 |
+
st.dataframe(df, use_container_width=True)
|
| 48 |
+
|
| 49 |
+
if excel_bytes:
|
| 50 |
+
st.download_button(
|
| 51 |
+
label="Download CIQ 3G Excel",
|
| 52 |
+
data=excel_bytes,
|
| 53 |
+
file_name="CIQ_3G.xlsx",
|
| 54 |
+
mime="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
|
| 55 |
+
type="primary",
|
| 56 |
+
)
|
queries/process_ciq_2g.py
CHANGED
|
@@ -7,6 +7,25 @@ import pandas as pd
|
|
| 7 |
|
| 8 |
REQUIRED_DUMP_BTS_COLS = ["BSC", "BCF", "BTS", "usedMobileAllocation"]
|
| 9 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 10 |
|
| 11 |
def _normalize_col(col: object) -> str:
|
| 12 |
return re.sub(r"[^0-9A-Za-z]", "", str(col))
|
|
@@ -84,6 +103,36 @@ def _parse_site_number(site: object) -> int:
|
|
| 84 |
return int(m.group(1)) if m else 0
|
| 85 |
|
| 86 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 87 |
def _extract_band_and_sector(cell_name: object) -> tuple[Optional[str], Optional[int]]:
|
| 88 |
if not isinstance(cell_name, str):
|
| 89 |
return None, None
|
|
@@ -157,28 +206,22 @@ def _needed_bts_ids_from_site_rows(
|
|
| 157 |
return tuple(sorted(ids))
|
| 158 |
|
| 159 |
|
| 160 |
-
def _parse_ciq_sites(
|
| 161 |
-
|
| 162 |
-
|
| 163 |
-
|
| 164 |
-
|
| 165 |
-
|
| 166 |
-
|
| 167 |
-
|
| 168 |
-
|
| 169 |
-
|
|
|
|
|
|
|
| 170 |
if missing:
|
| 171 |
raise ValueError(f"CIQ brut is missing required columns: {missing}")
|
| 172 |
|
| 173 |
-
df =
|
| 174 |
-
|
| 175 |
-
df["site_number"] = df["Sites"].apply(_parse_site_number)
|
| 176 |
-
df["BSC ID"] = pd.to_numeric(df["BSC ID"], errors="coerce")
|
| 177 |
-
df["Nbre_TRE_DR"] = pd.to_numeric(df["Nbre_TRE_DR"], errors="coerce")
|
| 178 |
-
|
| 179 |
-
bands_sectors = df["NOM_CELLULE"].apply(_extract_band_and_sector)
|
| 180 |
-
df["band"] = bands_sectors.apply(lambda x: x[0])
|
| 181 |
-
df["sector"] = bands_sectors.apply(lambda x: x[1])
|
| 182 |
|
| 183 |
sites: list[_PlannedSite] = []
|
| 184 |
|
|
@@ -215,21 +258,8 @@ def _parse_ciq_sites(ciq_file) -> list[_PlannedSite]:
|
|
| 215 |
|
| 216 |
|
| 217 |
def _assign_bcfs(
|
| 218 |
-
dump_bts: pd.DataFrame, planned_sites: list[_PlannedSite],
|
| 219 |
) -> list[_PlannedSite]:
|
| 220 |
-
if hasattr(ciq_file, "seek"):
|
| 221 |
-
ciq_file.seek(0)
|
| 222 |
-
|
| 223 |
-
ciq_df = pd.read_excel(ciq_file, engine="calamine")
|
| 224 |
-
ciq_df.columns = ciq_df.columns.astype(str).str.strip()
|
| 225 |
-
ciq_df = ciq_df[["Sites", "NOM_CELLULE", "Nbre_TRE_DR", "Nom BSC", "BSC ID"]].copy()
|
| 226 |
-
|
| 227 |
-
ciq_df["BSC ID"] = pd.to_numeric(ciq_df["BSC ID"], errors="coerce")
|
| 228 |
-
ciq_df["Nbre_TRE_DR"] = pd.to_numeric(ciq_df["Nbre_TRE_DR"], errors="coerce")
|
| 229 |
-
|
| 230 |
-
bands_sectors = ciq_df["NOM_CELLULE"].apply(_extract_band_and_sector)
|
| 231 |
-
ciq_df["band"] = bands_sectors.apply(lambda x: x[0])
|
| 232 |
-
ciq_df["sector"] = bands_sectors.apply(lambda x: x[1])
|
| 233 |
|
| 234 |
dump_bts = dump_bts.dropna(subset=["BSC"])
|
| 235 |
|
|
@@ -318,9 +348,15 @@ def _assign_bcfs(
|
|
| 318 |
|
| 319 |
def build_bcf_sheet(dump_file, ciq_file) -> pd.DataFrame:
|
| 320 |
dump_bts = _read_dump_bts_required_columns(dump_file)
|
| 321 |
-
|
| 322 |
-
|
|
|
|
|
|
|
| 323 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 324 |
rows = []
|
| 325 |
for i, s in enumerate(assigned_sites, start=1):
|
| 326 |
rows.append(
|
|
@@ -334,24 +370,377 @@ def build_bcf_sheet(dump_file, ciq_file) -> pd.DataFrame:
|
|
| 334 |
"Configuration": s.configuration,
|
| 335 |
}
|
| 336 |
)
|
|
|
|
|
|
|
| 337 |
|
| 338 |
-
|
| 339 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 340 |
|
| 341 |
|
| 342 |
-
def
|
| 343 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 344 |
|
| 345 |
sheets: dict[str, pd.DataFrame] = {
|
| 346 |
"BCF": df_bcf,
|
| 347 |
-
"BTS":
|
| 348 |
-
"BTS_GPRS":
|
| 349 |
-
"BTS_AMR":
|
| 350 |
-
"HOC":
|
| 351 |
-
"POC":
|
| 352 |
-
"MAL":
|
| 353 |
-
"BTS_PLMNPERMITTED":
|
| 354 |
-
"TRX":
|
| 355 |
}
|
| 356 |
|
| 357 |
bytes_io = io.BytesIO()
|
|
|
|
| 7 |
|
| 8 |
REQUIRED_DUMP_BTS_COLS = ["BSC", "BCF", "BTS", "usedMobileAllocation"]
|
| 9 |
|
| 10 |
+
BTS_EXPORT_COLUMNS = [
|
| 11 |
+
"site",
|
| 12 |
+
"bscid",
|
| 13 |
+
"cellId",
|
| 14 |
+
"bcfId",
|
| 15 |
+
"btsId",
|
| 16 |
+
"Check",
|
| 17 |
+
"bsIdentityCodeNCC",
|
| 18 |
+
"bsIdentityCodeBCC",
|
| 19 |
+
"locationAreaIdLAC",
|
| 20 |
+
"locationAreaIdMCC",
|
| 21 |
+
"locationAreaIdMNC",
|
| 22 |
+
"usedMobileAllocation",
|
| 23 |
+
"malId",
|
| 24 |
+
"name",
|
| 25 |
+
"template_name",
|
| 26 |
+
"sectorId",
|
| 27 |
+
]
|
| 28 |
+
|
| 29 |
|
| 30 |
def _normalize_col(col: object) -> str:
|
| 31 |
return re.sub(r"[^0-9A-Za-z]", "", str(col))
|
|
|
|
| 103 |
return int(m.group(1)) if m else 0
|
| 104 |
|
| 105 |
|
| 106 |
+
def _read_ciq_df(ciq_file) -> pd.DataFrame:
|
| 107 |
+
if hasattr(ciq_file, "seek"):
|
| 108 |
+
ciq_file.seek(0)
|
| 109 |
+
|
| 110 |
+
df = pd.read_excel(ciq_file, engine="calamine")
|
| 111 |
+
df.columns = df.columns.astype(str).str.strip()
|
| 112 |
+
|
| 113 |
+
if "Sites" not in df.columns:
|
| 114 |
+
raise ValueError("CIQ brut is missing required column: Sites")
|
| 115 |
+
|
| 116 |
+
df["Sites"] = df["Sites"].where(df["Sites"].notna(), pd.NA)
|
| 117 |
+
df["Sites"] = df["Sites"].astype("string").str.strip()
|
| 118 |
+
df["site_number"] = df["Sites"].apply(_parse_site_number)
|
| 119 |
+
|
| 120 |
+
if "BSC ID" in df.columns:
|
| 121 |
+
df["BSC ID"] = pd.to_numeric(df["BSC ID"], errors="coerce")
|
| 122 |
+
if "Nbre_TRE_DR" in df.columns:
|
| 123 |
+
df["Nbre_TRE_DR"] = pd.to_numeric(df["Nbre_TRE_DR"], errors="coerce")
|
| 124 |
+
|
| 125 |
+
if "NOM_CELLULE" in df.columns:
|
| 126 |
+
bands_sectors = df["NOM_CELLULE"].apply(_extract_band_and_sector)
|
| 127 |
+
df["band"] = bands_sectors.apply(lambda x: x[0])
|
| 128 |
+
df["sector"] = bands_sectors.apply(lambda x: x[1])
|
| 129 |
+
else:
|
| 130 |
+
df["band"] = None
|
| 131 |
+
df["sector"] = None
|
| 132 |
+
|
| 133 |
+
return df
|
| 134 |
+
|
| 135 |
+
|
| 136 |
def _extract_band_and_sector(cell_name: object) -> tuple[Optional[str], Optional[int]]:
|
| 137 |
if not isinstance(cell_name, str):
|
| 138 |
return None, None
|
|
|
|
| 206 |
return tuple(sorted(ids))
|
| 207 |
|
| 208 |
|
| 209 |
+
def _parse_ciq_sites(ciq_df: pd.DataFrame) -> list[_PlannedSite]:
|
| 210 |
+
required = [
|
| 211 |
+
"Sites",
|
| 212 |
+
"NOM_CELLULE",
|
| 213 |
+
"Nbre_TRE_DR",
|
| 214 |
+
"Nom BSC",
|
| 215 |
+
"BSC ID",
|
| 216 |
+
"band",
|
| 217 |
+
"sector",
|
| 218 |
+
"site_number",
|
| 219 |
+
]
|
| 220 |
+
missing = [c for c in required if c not in ciq_df.columns]
|
| 221 |
if missing:
|
| 222 |
raise ValueError(f"CIQ brut is missing required columns: {missing}")
|
| 223 |
|
| 224 |
+
df = ciq_df[required].copy()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 225 |
|
| 226 |
sites: list[_PlannedSite] = []
|
| 227 |
|
|
|
|
| 258 |
|
| 259 |
|
| 260 |
def _assign_bcfs(
|
| 261 |
+
dump_bts: pd.DataFrame, planned_sites: list[_PlannedSite], ciq_df: pd.DataFrame
|
| 262 |
) -> list[_PlannedSite]:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 263 |
|
| 264 |
dump_bts = dump_bts.dropna(subset=["BSC"])
|
| 265 |
|
|
|
|
| 348 |
|
| 349 |
def build_bcf_sheet(dump_file, ciq_file) -> pd.DataFrame:
|
| 350 |
dump_bts = _read_dump_bts_required_columns(dump_file)
|
| 351 |
+
ciq_df = _read_ciq_df(ciq_file)
|
| 352 |
+
planned_sites = _parse_ciq_sites(ciq_df)
|
| 353 |
+
assigned_sites = _assign_bcfs(dump_bts, planned_sites, ciq_df)
|
| 354 |
+
return _build_bcf_sheet_from_assigned_sites(assigned_sites)
|
| 355 |
|
| 356 |
+
|
| 357 |
+
def _build_bcf_sheet_from_assigned_sites(
|
| 358 |
+
assigned_sites: list[_PlannedSite],
|
| 359 |
+
) -> pd.DataFrame:
|
| 360 |
rows = []
|
| 361 |
for i, s in enumerate(assigned_sites, start=1):
|
| 362 |
rows.append(
|
|
|
|
| 370 |
"Configuration": s.configuration,
|
| 371 |
}
|
| 372 |
)
|
| 373 |
+
return pd.DataFrame(rows)
|
| 374 |
+
|
| 375 |
|
| 376 |
+
def _sector_id_from_band_sector(band: object, sector: object) -> int:
|
| 377 |
+
if band not in {"G9", "G18"}:
|
| 378 |
+
raise ValueError(f"Invalid band '{band}'")
|
| 379 |
+
if sector is None or (isinstance(sector, float) and pd.isna(sector)):
|
| 380 |
+
raise ValueError("Missing sector")
|
| 381 |
+
sec = int(sector)
|
| 382 |
+
if sec not in {1, 2, 3}:
|
| 383 |
+
raise ValueError(f"Invalid sector '{sec}'")
|
| 384 |
+
return sec if band == "G9" else sec + 3
|
| 385 |
|
| 386 |
|
| 387 |
+
def _template_name_from_freq(freq: object) -> str:
|
| 388 |
+
s = str(freq) if freq is not None else ""
|
| 389 |
+
s_u = s.upper()
|
| 390 |
+
if "1800" in s_u:
|
| 391 |
+
return "GSM1800"
|
| 392 |
+
if "900" in s_u:
|
| 393 |
+
return "GSM900"
|
| 394 |
+
return s
|
| 395 |
+
|
| 396 |
+
|
| 397 |
+
def _template_name_from_band(band: object) -> str:
|
| 398 |
+
if band == "G9":
|
| 399 |
+
return "GSM900"
|
| 400 |
+
if band == "G18":
|
| 401 |
+
return "GSM1800"
|
| 402 |
+
return _template_name_from_freq(band)
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
def _frequency_band_in_use_from_band(band: object) -> int:
|
| 406 |
+
if band == "G9":
|
| 407 |
+
return 0
|
| 408 |
+
if band == "G18":
|
| 409 |
+
return 1
|
| 410 |
+
raise ValueError(f"Invalid band '{band}'")
|
| 411 |
+
|
| 412 |
+
|
| 413 |
+
def _parse_trx_frequencies(value: object) -> list[str]:
|
| 414 |
+
if value is None or (isinstance(value, float) and pd.isna(value)):
|
| 415 |
+
return []
|
| 416 |
+
|
| 417 |
+
s = str(value)
|
| 418 |
+
nums = re.findall(r"\d+", s)
|
| 419 |
+
return nums
|
| 420 |
+
|
| 421 |
+
|
| 422 |
+
def _build_trx_sheet_from_assigned_sites(
|
| 423 |
+
ciq_df: pd.DataFrame, assigned_sites: list[_PlannedSite]
|
| 424 |
+
) -> pd.DataFrame:
|
| 425 |
+
assigned_by_site = {s.site_name: s for s in assigned_sites}
|
| 426 |
+
|
| 427 |
+
required = [
|
| 428 |
+
"Sites",
|
| 429 |
+
"CI",
|
| 430 |
+
"band",
|
| 431 |
+
"sector",
|
| 432 |
+
"BCCH",
|
| 433 |
+
"TRX",
|
| 434 |
+
"BCC",
|
| 435 |
+
]
|
| 436 |
+
missing = [c for c in required if c not in ciq_df.columns]
|
| 437 |
+
if missing:
|
| 438 |
+
raise ValueError(
|
| 439 |
+
f"CIQ brut is missing required columns for TRX sheet: {missing}"
|
| 440 |
+
)
|
| 441 |
+
|
| 442 |
+
rows = []
|
| 443 |
+
|
| 444 |
+
bcch_types = [4, 8, 6, 2, 2, 2, 2, 2]
|
| 445 |
+
|
| 446 |
+
for _, r in ciq_df[ciq_df["Sites"].isin(assigned_by_site.keys())].iterrows():
|
| 447 |
+
site_name = r["Sites"]
|
| 448 |
+
site = assigned_by_site.get(site_name)
|
| 449 |
+
if site is None or site.assigned_bcf is None:
|
| 450 |
+
continue
|
| 451 |
+
|
| 452 |
+
sector_id = _sector_id_from_band_sector(r.get("band"), r.get("sector"))
|
| 453 |
+
bts_id = int(site.assigned_bcf) + int(sector_id)
|
| 454 |
+
|
| 455 |
+
cell_id = pd.to_numeric(r.get("CI"), errors="coerce")
|
| 456 |
+
bcch = pd.to_numeric(r.get("BCCH"), errors="coerce")
|
| 457 |
+
bcc = pd.to_numeric(r.get("BCC"), errors="coerce")
|
| 458 |
+
|
| 459 |
+
freq_band = _frequency_band_in_use_from_band(r.get("band"))
|
| 460 |
+
|
| 461 |
+
base = {
|
| 462 |
+
"site": int(site.site_number),
|
| 463 |
+
"bscid": int(site.bsc),
|
| 464 |
+
"cellId": int(cell_id) if not pd.isna(cell_id) else None,
|
| 465 |
+
"bcfId": int(site.assigned_bcf),
|
| 466 |
+
"btsId": int(bts_id),
|
| 467 |
+
"tsc": int(bcc) if not pd.isna(bcc) else None,
|
| 468 |
+
"FrequencyBandinUse": int(freq_band),
|
| 469 |
+
}
|
| 470 |
+
|
| 471 |
+
bcch_row = dict(base)
|
| 472 |
+
bcch_row["TRX"] = None
|
| 473 |
+
bcch_row["initialFrequency"] = int(bcch) if not pd.isna(bcch) else None
|
| 474 |
+
bcch_row["_sort_type"] = 0
|
| 475 |
+
bcch_row["_sort_maio"] = -1
|
| 476 |
+
|
| 477 |
+
for i in range(8):
|
| 478 |
+
bcch_row[f"channel{i}Maio"] = None
|
| 479 |
+
bcch_row[f"channel{i}Type"] = bcch_types[i]
|
| 480 |
+
rows.append(bcch_row)
|
| 481 |
+
|
| 482 |
+
trx_list = _parse_trx_frequencies(r.get("TRX"))
|
| 483 |
+
if not pd.isna(bcch):
|
| 484 |
+
bcch_str = str(int(bcch))
|
| 485 |
+
trx_list = [x for x in trx_list if x != bcch_str]
|
| 486 |
+
|
| 487 |
+
for maio, f in enumerate(trx_list):
|
| 488 |
+
tr_row = dict(base)
|
| 489 |
+
tr_row["TRX"] = None
|
| 490 |
+
tr_row["initialFrequency"] = int(f)
|
| 491 |
+
tr_row["_sort_type"] = 1
|
| 492 |
+
tr_row["_sort_maio"] = int(maio)
|
| 493 |
+
for i in range(8):
|
| 494 |
+
tr_row[f"channel{i}Maio"] = maio
|
| 495 |
+
tr_row[f"channel{i}Type"] = 3 if i == 0 else 2
|
| 496 |
+
rows.append(tr_row)
|
| 497 |
+
|
| 498 |
+
df_trx = pd.DataFrame(rows)
|
| 499 |
+
if df_trx.empty:
|
| 500 |
+
return df_trx
|
| 501 |
+
|
| 502 |
+
ordered_cols = [
|
| 503 |
+
"site",
|
| 504 |
+
"bscid",
|
| 505 |
+
"cellId",
|
| 506 |
+
"bcfId",
|
| 507 |
+
"btsId",
|
| 508 |
+
"TRX",
|
| 509 |
+
"tsc",
|
| 510 |
+
"FrequencyBandinUse",
|
| 511 |
+
"initialFrequency",
|
| 512 |
+
]
|
| 513 |
+
for i in range(8):
|
| 514 |
+
ordered_cols.append(f"channel{i}Maio")
|
| 515 |
+
ordered_cols.append(f"channel{i}Type")
|
| 516 |
+
|
| 517 |
+
df_trx = df_trx.sort_values(
|
| 518 |
+
by=["site", "btsId", "_sort_type", "_sort_maio"], kind="stable"
|
| 519 |
+
)
|
| 520 |
+
df_trx["TRX"] = range(1, len(df_trx) + 1)
|
| 521 |
+
df_trx = df_trx[ordered_cols]
|
| 522 |
+
return df_trx
|
| 523 |
+
|
| 524 |
+
|
| 525 |
+
def build_bts_sheet(dump_file, ciq_file, mcc: int = 610, mnc: int = 2) -> pd.DataFrame:
|
| 526 |
+
dump_bts = _read_dump_bts_required_columns(dump_file)
|
| 527 |
+
ciq_df = _read_ciq_df(ciq_file)
|
| 528 |
+
|
| 529 |
+
planned_sites = _parse_ciq_sites(ciq_df)
|
| 530 |
+
assigned_sites = _assign_bcfs(dump_bts, planned_sites, ciq_df)
|
| 531 |
+
return _build_bts_sheet_from_assigned_sites(
|
| 532 |
+
ciq_df, assigned_sites, mcc=mcc, mnc=mnc
|
| 533 |
+
)
|
| 534 |
+
|
| 535 |
+
|
| 536 |
+
def _build_bts_sheet_from_assigned_sites(
|
| 537 |
+
ciq_df: pd.DataFrame, assigned_sites: list[_PlannedSite], mcc: int, mnc: int
|
| 538 |
+
) -> pd.DataFrame:
|
| 539 |
+
assigned_by_site = {s.site_name: s for s in assigned_sites}
|
| 540 |
+
|
| 541 |
+
required = [
|
| 542 |
+
"Sites",
|
| 543 |
+
"NOM_CELLULE",
|
| 544 |
+
"CI",
|
| 545 |
+
"LAC",
|
| 546 |
+
"Frequence",
|
| 547 |
+
"NCC",
|
| 548 |
+
"BCC",
|
| 549 |
+
"band",
|
| 550 |
+
"sector",
|
| 551 |
+
]
|
| 552 |
+
missing = [c for c in required if c not in ciq_df.columns]
|
| 553 |
+
if missing:
|
| 554 |
+
raise ValueError(
|
| 555 |
+
f"CIQ brut is missing required columns for BTS sheet: {missing}"
|
| 556 |
+
)
|
| 557 |
+
|
| 558 |
+
rows = []
|
| 559 |
+
for _, r in ciq_df[ciq_df["Sites"].isin(assigned_by_site.keys())].iterrows():
|
| 560 |
+
site_name = r["Sites"]
|
| 561 |
+
site = assigned_by_site.get(site_name)
|
| 562 |
+
if site is None or site.assigned_bcf is None:
|
| 563 |
+
continue
|
| 564 |
+
|
| 565 |
+
sector_id = _sector_id_from_band_sector(r.get("band"), r.get("sector"))
|
| 566 |
+
bts_id = int(site.assigned_bcf) + int(sector_id)
|
| 567 |
+
cell_id = pd.to_numeric(r.get("CI"), errors="coerce")
|
| 568 |
+
lac = pd.to_numeric(r.get("LAC"), errors="coerce")
|
| 569 |
+
ncc = pd.to_numeric(r.get("NCC"), errors="coerce")
|
| 570 |
+
bcc = pd.to_numeric(r.get("BCC"), errors="coerce")
|
| 571 |
+
|
| 572 |
+
rows.append(
|
| 573 |
+
{
|
| 574 |
+
"site": int(site.site_number),
|
| 575 |
+
"bscid": int(site.bsc),
|
| 576 |
+
"cellId": int(cell_id) if not pd.isna(cell_id) else None,
|
| 577 |
+
"bcfId": int(site.assigned_bcf),
|
| 578 |
+
"btsId": int(bts_id),
|
| 579 |
+
"Check": int(sector_id),
|
| 580 |
+
"bsIdentityCodeNCC": int(ncc) if not pd.isna(ncc) else None,
|
| 581 |
+
"bsIdentityCodeBCC": int(bcc) if not pd.isna(bcc) else None,
|
| 582 |
+
"locationAreaIdLAC": int(lac) if not pd.isna(lac) else None,
|
| 583 |
+
"locationAreaIdMCC": int(mcc),
|
| 584 |
+
"locationAreaIdMNC": int(mnc),
|
| 585 |
+
"usedMobileAllocation": int(bts_id),
|
| 586 |
+
"malId": int(bts_id),
|
| 587 |
+
"name": f"{str(r.get('NOM_CELLULE'))}_NA",
|
| 588 |
+
"template_name": _template_name_from_band(r.get("band")),
|
| 589 |
+
"sectorId": int(sector_id),
|
| 590 |
+
}
|
| 591 |
+
)
|
| 592 |
+
|
| 593 |
+
df_bts = pd.DataFrame(rows)
|
| 594 |
+
if not df_bts.empty:
|
| 595 |
+
df_bts = df_bts[BTS_EXPORT_COLUMNS].sort_values(
|
| 596 |
+
by=["site", "sectorId"], kind="stable"
|
| 597 |
+
)
|
| 598 |
+
return df_bts
|
| 599 |
+
|
| 600 |
+
|
| 601 |
+
def _build_mal_sheet_from_assigned_sites(
|
| 602 |
+
ciq_df: pd.DataFrame, assigned_sites: list[_PlannedSite]
|
| 603 |
+
) -> pd.DataFrame:
|
| 604 |
+
assigned_by_site = {s.site_name: s for s in assigned_sites}
|
| 605 |
+
|
| 606 |
+
required = [
|
| 607 |
+
"Sites",
|
| 608 |
+
"CI",
|
| 609 |
+
"band",
|
| 610 |
+
"sector",
|
| 611 |
+
"BCCH",
|
| 612 |
+
"TRX",
|
| 613 |
+
]
|
| 614 |
+
missing = [c for c in required if c not in ciq_df.columns]
|
| 615 |
+
if missing:
|
| 616 |
+
raise ValueError(
|
| 617 |
+
f"CIQ brut is missing required columns for MAL sheet: {missing}"
|
| 618 |
+
)
|
| 619 |
+
|
| 620 |
+
rows = []
|
| 621 |
+
for _, r in ciq_df[ciq_df["Sites"].isin(assigned_by_site.keys())].iterrows():
|
| 622 |
+
site_name = r["Sites"]
|
| 623 |
+
site = assigned_by_site.get(site_name)
|
| 624 |
+
if site is None or site.assigned_bcf is None:
|
| 625 |
+
continue
|
| 626 |
+
|
| 627 |
+
sector_id = _sector_id_from_band_sector(r.get("band"), r.get("sector"))
|
| 628 |
+
bts_id = int(site.assigned_bcf) + int(sector_id)
|
| 629 |
+
|
| 630 |
+
cell_id = pd.to_numeric(r.get("CI"), errors="coerce")
|
| 631 |
+
bcch = pd.to_numeric(r.get("BCCH"), errors="coerce")
|
| 632 |
+
|
| 633 |
+
trx_list = _parse_trx_frequencies(r.get("TRX"))
|
| 634 |
+
freq_str = ", ".join(trx_list)
|
| 635 |
+
|
| 636 |
+
row = {
|
| 637 |
+
"site": int(site.site_number),
|
| 638 |
+
"siteId": int(site.site_number),
|
| 639 |
+
"bscid": int(site.bsc),
|
| 640 |
+
"cellId": int(cell_id) if not pd.isna(cell_id) else None,
|
| 641 |
+
"bcfId": int(site.assigned_bcf),
|
| 642 |
+
"btsId": int(bts_id),
|
| 643 |
+
"frequencyBandInUse": _frequency_band_in_use_from_band(r.get("band")),
|
| 644 |
+
"malId": int(bts_id),
|
| 645 |
+
"initial frequency": int(bcch) if not pd.isna(bcch) else None,
|
| 646 |
+
"frequency": freq_str if freq_str else None,
|
| 647 |
+
}
|
| 648 |
+
|
| 649 |
+
for i in range(1, 7):
|
| 650 |
+
row[f"frequency{i}"] = trx_list[i - 1] if len(trx_list) >= i else None
|
| 651 |
+
|
| 652 |
+
rows.append(row)
|
| 653 |
+
|
| 654 |
+
df_mal = pd.DataFrame(rows)
|
| 655 |
+
if df_mal.empty:
|
| 656 |
+
return df_mal
|
| 657 |
+
|
| 658 |
+
ordered_cols = [
|
| 659 |
+
"site",
|
| 660 |
+
"siteId",
|
| 661 |
+
"bscid",
|
| 662 |
+
"cellId",
|
| 663 |
+
"bcfId",
|
| 664 |
+
"btsId",
|
| 665 |
+
"frequencyBandInUse",
|
| 666 |
+
"malId",
|
| 667 |
+
"initial frequency",
|
| 668 |
+
"frequency",
|
| 669 |
+
"frequency1",
|
| 670 |
+
"frequency2",
|
| 671 |
+
"frequency3",
|
| 672 |
+
"frequency4",
|
| 673 |
+
"frequency5",
|
| 674 |
+
"frequency6",
|
| 675 |
+
]
|
| 676 |
+
df_mal = df_mal[ordered_cols].sort_values(by=["site", "btsId"], kind="stable")
|
| 677 |
+
return df_mal
|
| 678 |
+
|
| 679 |
+
|
| 680 |
+
def generate_ciq_2g_excel(
|
| 681 |
+
dump_file, ciq_file, mcc: int = 610, mnc: int = 2
|
| 682 |
+
) -> tuple[dict[str, pd.DataFrame], bytes]:
|
| 683 |
+
dump_bts = _read_dump_bts_required_columns(dump_file)
|
| 684 |
+
ciq_df = _read_ciq_df(ciq_file)
|
| 685 |
+
planned_sites = _parse_ciq_sites(ciq_df)
|
| 686 |
+
assigned_sites = _assign_bcfs(dump_bts, planned_sites, ciq_df)
|
| 687 |
+
|
| 688 |
+
df_bcf = _build_bcf_sheet_from_assigned_sites(assigned_sites)
|
| 689 |
+
df_bts = _build_bts_sheet_from_assigned_sites(
|
| 690 |
+
ciq_df, assigned_sites, mcc=mcc, mnc=mnc
|
| 691 |
+
)
|
| 692 |
+
df_mal = _build_mal_sheet_from_assigned_sites(ciq_df, assigned_sites)
|
| 693 |
+
df_trx = _build_trx_sheet_from_assigned_sites(ciq_df, assigned_sites)
|
| 694 |
+
|
| 695 |
+
df_bts_min = pd.DataFrame()
|
| 696 |
+
if not df_bts.empty:
|
| 697 |
+
df_bts_min = df_bts[["site", "bscid", "cellId", "bcfId", "btsId"]].rename(
|
| 698 |
+
columns={"site": "Site"}
|
| 699 |
+
)
|
| 700 |
+
|
| 701 |
+
df_hoc = pd.DataFrame()
|
| 702 |
+
df_poc = pd.DataFrame()
|
| 703 |
+
if not df_bts.empty:
|
| 704 |
+
base = df_bts[
|
| 705 |
+
["site", "bscid", "cellId", "bcfId", "btsId", "template_name"]
|
| 706 |
+
].rename(columns={"site": "Site"})
|
| 707 |
+
|
| 708 |
+
df_hoc = base.copy()
|
| 709 |
+
df_hoc.insert(5, "hocId", 1)
|
| 710 |
+
df_hoc = df_hoc[
|
| 711 |
+
["Site", "bscid", "cellId", "bcfId", "btsId", "hocId", "template_name"]
|
| 712 |
+
]
|
| 713 |
+
|
| 714 |
+
df_poc = base.copy()
|
| 715 |
+
df_poc.insert(5, "pocId", 1)
|
| 716 |
+
df_poc = df_poc[
|
| 717 |
+
["Site", "bscid", "cellId", "bcfId", "btsId", "pocId", "template_name"]
|
| 718 |
+
]
|
| 719 |
+
|
| 720 |
+
df_plmn_permitted = pd.DataFrame()
|
| 721 |
+
if not df_bts.empty:
|
| 722 |
+
base_plmn = df_bts[["bscid", "cellId", "bcfId", "btsId"]].rename(
|
| 723 |
+
columns={"bscid": "BSCId"}
|
| 724 |
+
)
|
| 725 |
+
df_plmn_permitted = base_plmn.loc[base_plmn.index.repeat(8)].reset_index(
|
| 726 |
+
drop=True
|
| 727 |
+
)
|
| 728 |
+
df_plmn_permitted["template_name"] = list(range(1, 9)) * len(base_plmn)
|
| 729 |
+
df_plmn_permitted["plmnPermitted"] = "List;1;1;1;1;1;1;1;1"
|
| 730 |
+
df_plmn_permitted = df_plmn_permitted[
|
| 731 |
+
["BSCId", "cellId", "bcfId", "btsId", "template_name", "plmnPermitted"]
|
| 732 |
+
]
|
| 733 |
|
| 734 |
sheets: dict[str, pd.DataFrame] = {
|
| 735 |
"BCF": df_bcf,
|
| 736 |
+
"BTS": df_bts,
|
| 737 |
+
"BTS_GPRS": df_bts_min,
|
| 738 |
+
"BTS_AMR": df_bts_min,
|
| 739 |
+
"HOC": df_hoc,
|
| 740 |
+
"POC": df_poc,
|
| 741 |
+
"MAL": df_mal,
|
| 742 |
+
"BTS_PLMNPERMITTED": df_plmn_permitted,
|
| 743 |
+
"TRX": df_trx,
|
| 744 |
}
|
| 745 |
|
| 746 |
bytes_io = io.BytesIO()
|
queries/process_ciq_3g.py
ADDED
|
@@ -0,0 +1,276 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import re
|
| 3 |
+
from dataclasses import dataclass
|
| 4 |
+
from typing import Optional
|
| 5 |
+
|
| 6 |
+
import pandas as pd
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def _parse_int(value: object) -> Optional[int]:
|
| 10 |
+
v = pd.to_numeric(value, errors="coerce")
|
| 11 |
+
if pd.isna(v):
|
| 12 |
+
return None
|
| 13 |
+
return int(v)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def _base_site_name_from_sites(sites: object) -> str:
|
| 17 |
+
if not isinstance(sites, str):
|
| 18 |
+
return ""
|
| 19 |
+
|
| 20 |
+
s = sites.strip()
|
| 21 |
+
|
| 22 |
+
for suffix in ["_3G", "_3g"]:
|
| 23 |
+
if s.endswith(suffix):
|
| 24 |
+
return s[: -len(suffix)]
|
| 25 |
+
|
| 26 |
+
return s
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def read_ciq_3g_brut(ciq_file) -> pd.DataFrame:
|
| 30 |
+
if hasattr(ciq_file, "seek"):
|
| 31 |
+
ciq_file.seek(0)
|
| 32 |
+
|
| 33 |
+
df = pd.read_excel(ciq_file, engine="calamine")
|
| 34 |
+
df.columns = df.columns.astype(str).str.strip()
|
| 35 |
+
|
| 36 |
+
if "Sites" not in df.columns:
|
| 37 |
+
raise ValueError("CIQ 3G brut is missing required column: Sites")
|
| 38 |
+
|
| 39 |
+
df["Sites"] = df["Sites"].where(df["Sites"].notna(), pd.NA)
|
| 40 |
+
df["Sites"] = df["Sites"].astype("string").str.strip()
|
| 41 |
+
|
| 42 |
+
return df
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def _band_from_cell_name(cell_name: object) -> str:
|
| 46 |
+
if not isinstance(cell_name, str):
|
| 47 |
+
return ""
|
| 48 |
+
s = cell_name.upper()
|
| 49 |
+
if "_U900" in s or s.endswith("U900"):
|
| 50 |
+
return "U900"
|
| 51 |
+
if "_U2100" in s or s.endswith("U2100"):
|
| 52 |
+
return "U2100"
|
| 53 |
+
return ""
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def _band_label(band: str) -> str:
|
| 57 |
+
if band == "U900":
|
| 58 |
+
return "U900 (U9)"
|
| 59 |
+
if band == "U2100":
|
| 60 |
+
return "U2100 (U21)"
|
| 61 |
+
return band
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def _cell_number_from_cell_name(cell_name: object) -> Optional[int]:
|
| 65 |
+
if not isinstance(cell_name, str):
|
| 66 |
+
return None
|
| 67 |
+
m = re.search(r"_(\d+)_U(?:900|2100)\b", cell_name.upper())
|
| 68 |
+
if not m:
|
| 69 |
+
return None
|
| 70 |
+
try:
|
| 71 |
+
v = int(m.group(1))
|
| 72 |
+
except ValueError:
|
| 73 |
+
return None
|
| 74 |
+
return v if v > 0 else None
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def _sector_id_from_cell_name(cell_name: object) -> int:
    """Derive a 1-based sector id (1..3) from the cell number, cycling every 3.

    Raises ValueError when no cell number can be extracted from the name.
    """
    cell_number = _cell_number_from_cell_name(cell_name)
    if cell_number is None:
        raise ValueError(f"Cannot derive SectorID from NOM_CELLULE='{cell_name}'")
    # 1->1, 2->2, 3->3, 4->1, ...
    return (int(cell_number) - 1) % 3 + 1
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def _tcell_from_band_and_sector(band: str, sector_id: int) -> int:
|
| 85 |
+
if band == "U900":
|
| 86 |
+
return sector_id + 2 # 1->3, 2->4, 3->5
|
| 87 |
+
# U2100
|
| 88 |
+
tcell_map = {1: 0, 2: 1, 3: 3}
|
| 89 |
+
if sector_id not in tcell_map:
|
| 90 |
+
raise ValueError(f"Invalid SectorID '{sector_id}' for Tcell")
|
| 91 |
+
return tcell_map[sector_id]
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def build_wcel_sheet(ciq_df: pd.DataFrame) -> pd.DataFrame:
    """Build the WCEL sheet (one row per 3G cell) from the raw CIQ DataFrame.

    Groups rows per (NodeB_ID, RNC_id), derives per-cell band, sector id,
    LcrId and Tcell, and returns a DataFrame sorted by Site then LcrId.
    Raises ValueError when required columns are missing or when a cell
    name yields no sector id. Rows whose NodeB_ID/RNC_id/FREQUENCE are
    not numeric, or whose band is not U900/U2100, are silently skipped.
    """
    required = [
        "Sites",
        "NodeB_ID",
        "NOM_CELLULE",
        "CELLID",
        "SAC",
        "LAC",
        "RAC",
        "FREQUENCE",
        "PSCRAMBCODE",
        "RNC_id",
    ]
    missing = [c for c in required if c not in ciq_df.columns]
    if missing:
        raise ValueError(f"CIQ 3G brut is missing required columns for WCEL: {missing}")

    rows = []
    # dropna=False keeps groups with missing ids; they are skipped just below.
    for site_key, group in ciq_df.groupby(["NodeB_ID", "RNC_id"], dropna=False):
        nodeb_id_raw, rnc_id_raw = site_key
        nodeb_id = _parse_int(nodeb_id_raw)
        rnc_id = _parse_int(rnc_id_raw)
        if nodeb_id is None or rnc_id is None:
            continue

        tmp = group.copy()
        # Working column prefixed with "_" so it cannot clash with CIQ columns.
        tmp["_band"] = tmp["NOM_CELLULE"].apply(_band_from_cell_name)

        # U2100 LcrId grouping by UARFCN (FREQUENCE)
        # Each distinct U2100 carrier gets a base LcrId of 1, 4, 7, ...
        # (in ascending UARFCN order) so its three sectors occupy a
        # contiguous block of ids.
        u2100 = tmp[tmp["_band"] == "U2100"].copy()
        u2100_uarfcns = sorted(
            pd.to_numeric(u2100["FREQUENCE"], errors="coerce")
            .dropna()
            .astype(int)
            .unique()
        )
        u2100_base_by_uarfcn = {
            uarfcn: 1 + 3 * idx for idx, uarfcn in enumerate(u2100_uarfcns)
        }

        for _, r in tmp.iterrows():
            band = r.get("_band")
            if band not in {"U900", "U2100"}:
                continue

            uarfcn = _parse_int(r.get("FREQUENCE"))
            if uarfcn is None:
                continue

            # Raises ValueError if NOM_CELLULE carries no cell number.
            sector_id = _sector_id_from_cell_name(r.get("NOM_CELLULE"))

            if band == "U900":
                lcr_id = 9 + sector_id  # 10..12
            else:
                base = u2100_base_by_uarfcn.get(uarfcn)
                if base is None:
                    # Should not happen, but keep safe
                    base = 1
                # base..base+2 for sectors 1..3 of this carrier.
                lcr_id = base + (sector_id - 1)

            cid = _parse_int(r.get("CELLID"))
            lac = _parse_int(r.get("LAC"))
            rac = _parse_int(r.get("RAC"))
            sac = _parse_int(r.get("SAC"))

            name = f"{str(r.get('NOM_CELLULE'))}_NA"

            rows.append(
                {
                    "Site": nodeb_id,
                    "RncId": rnc_id,
                    "WBTSId": nodeb_id,
                    "LcrId": int(lcr_id),
                    "Band": _band_label(band),
                    "CId": cid,
                    "LAC": lac,
                    "name": name,
                    "PriScrCode": _parse_int(r.get("PSCRAMBCODE")),
                    "PWSMCellGroup": int(sector_id),
                    "RAC": rac,
                    "SAC": sac,
                    "Tcell": _tcell_from_band_and_sector(band, int(sector_id)),
                    "UARFCN": int(uarfcn),
                    "SectorID": int(sector_id),
                }
            )

    df_wcel = pd.DataFrame(rows)
    if df_wcel.empty:
        # No columns to reorder on an empty frame.
        return df_wcel

    ordered = [
        "Site",
        "RncId",
        "WBTSId",
        "LcrId",
        "Band",
        "CId",
        "LAC",
        "name",
        "PriScrCode",
        "PWSMCellGroup",
        "RAC",
        "SAC",
        "Tcell",
        "UARFCN",
        "SectorID",
    ]
    # Stable sort preserves input order within equal (Site, LcrId) pairs.
    df_wcel = df_wcel[ordered].sort_values(by=["Site", "LcrId"], kind="stable")
    return df_wcel
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def build_wbts_sheet(
    ciq_df: pd.DataFrame, year_suffix: str, bands: str
) -> pd.DataFrame:
    """Build the WBTS sheet (one row per site) from the raw CIQ DataFrame.

    For each non-empty "Sites" group the first numeric NodeB_ID and
    RNC_id found are used; the display name is built as
    "<base>_<year_suffix>_<bands>_NA". Raises ValueError when required
    columns are missing or when a site has no numeric NodeB_ID/RNC_id.
    """
    required = ["Sites", "NodeB_ID", "RNC_id"]
    missing = [c for c in required if c not in ciq_df.columns]
    if missing:
        raise ValueError(f"CIQ 3G brut is missing required columns for WBTS: {missing}")

    rows = []
    for sites, group in ciq_df.groupby("Sites", dropna=False):
        # pd.isna covers None, float NaN and pd.NA — the latter is what
        # read_ciq_3g_brut produces for blank cells after the "string"
        # dtype conversion; an isinstance(float) check would miss it and
        # turn the group into a bogus "<NA>" site.
        if sites is None or pd.isna(sites):
            continue
        sites_str = str(sites).strip()
        if not sites_str:
            continue

        nodeb_ids = pd.to_numeric(group["NodeB_ID"], errors="coerce").dropna().unique()
        if len(nodeb_ids) == 0:
            raise ValueError(f"Missing NodeB_ID for site '{sites_str}'")
        nodeb_id = int(nodeb_ids[0])

        rnc_ids = pd.to_numeric(group["RNC_id"], errors="coerce").dropna().unique()
        if len(rnc_ids) == 0:
            raise ValueError(f"Missing RNC_id for site '{sites_str}'")
        rnc_id = int(rnc_ids[0])

        base_name = _base_site_name_from_sites(sites_str)
        name = f"{base_name}_{year_suffix}_{bands}_NA"
        wbts_name = f"{sites_str}_NA"

        rows.append(
            {
                "S": nodeb_id,
                "Name": name,
                "RncId": rnc_id,
                "WBTSId": nodeb_id,
                "name": wbts_name,
                "WBTSName": wbts_name,
            }
        )

    df_wbts = pd.DataFrame(rows)
    if not df_wbts.empty:
        df_wbts = df_wbts[
            ["S", "Name", "RncId", "WBTSId", "name", "WBTSName"]
        ].sort_values(by=["S"], kind="stable")

    return df_wbts
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
def generate_ciq_3g_excel(
    ciq_file,
    year_suffix: str = "25",
    bands: str = "G9G18U9U21L8L18L26",
) -> tuple[dict[str, pd.DataFrame], bytes]:
    """Generate the CIQ 3G workbook (WBTS + WCEL sheets) from a raw CIQ file.

    Returns a tuple of (sheets dict keyed by sheet name, xlsx bytes).
    """
    ciq_df = read_ciq_3g_brut(ciq_file)

    # Dict literal order fixes both build order and sheet order in the workbook.
    sheets: dict[str, pd.DataFrame] = {
        "WBTS": build_wbts_sheet(ciq_df, year_suffix=year_suffix, bands=bands),
        "WCEL": build_wcel_sheet(ciq_df),
    }

    buffer = io.BytesIO()
    with pd.ExcelWriter(buffer, engine="xlsxwriter") as writer:
        for sheet_name, frame in sheets.items():
            frame.to_excel(writer, sheet_name=sheet_name, index=False)

    return sheets, buffer.getvalue()
|