DavMelchi committed
Commit 56d8047 · Parent(s): 01dd930

adding TRX database

README.md CHANGED
@@ -40,6 +40,7 @@ You can access the hosted version of the app at [https://davmelchi-db-query.hf.s
  - [x] Add page to update physical db
  - [x] Add Core dump checking App
  - [x] Add site config band in database
+ - [x] Add TRX database
  - [ ] Add dashboards for each database (Count of NE)
  - [ ] Add the ability to select columns
  - [ ] Error handling
apps/database_page.py CHANGED
@@ -9,6 +9,7 @@ from queries.process_neighbors import (
     process_neighbors_data,
     process_neighbors_data_to_excel,
 )
+from queries.process_trx import process_trx_with_bts_name_data_to_excel
 from queries.process_wcdma import process_wcdma_data_to_excel
 from utils.check_sheet_exist import Technology, execute_checks_sheets_exist
 from utils.utils_vars import UtilsVars
@@ -46,6 +47,9 @@ def download_button(database_type):
     elif database_type == "NEI":
         data = UtilsVars.neighbors_database
         file_name = f"Neighbors database_{time.time()}.xlsx"
+    elif database_type == "TRX":
+        data = UtilsVars.final_trx_database
+        file_name = f"TRX database_{time.time()}.xlsx"
     st.download_button(
         type="primary",
         label=f"Download {database_type} Database File",
@@ -76,6 +80,8 @@ if uploaded_file is not None:
         Technology.gsm == False
         and Technology.wcdma == False
         and Technology.lte == False
+        and Technology.neighbors == False
+        and Technology.trx == False
     ):
         st.error(
             """
@@ -84,6 +90,7 @@ if uploaded_file is not None:
             "wcdma": ["WCEL", "WBTS", "WNCEL"],
             "lte": ["LNBTS", "LNCEL", "LNCEL_FDD", "LNCEL_TDD"],
             "neighbors": ["ADCE", "ADJS", "ADJI", "ADJG", "ADJW", "BTS", "WCEL"],
+            "trx": ["TRX", "BTS"],
             """
         )
 
@@ -122,6 +129,14 @@ if uploaded_file is not None:
                 ),
                 # on_click=lambda: process_neighbors_data(uploaded_file),
             )
+        if Technology.trx == True:
+            with col6:
+                st.button(
+                    "Generate TRX DB",
+                    on_click=lambda: process_database(
+                        process_trx_with_bts_name_data_to_excel, "TRX"
+                    ),
+                )
 
     except Exception as e:
         st.error(f"Error: {e}")
queries/process_all_db.py CHANGED
@@ -1,5 +1,6 @@
 from queries.process_gsm import process_gsm_data
 from queries.process_lte import process_lte_data
+from queries.process_trx import trx_with_bts_name
 from queries.process_wcdma import process_wcdma_data
 from utils.convert_to_excel import convert_dfs
 from utils.utils_vars import UtilsVars
@@ -10,7 +11,8 @@ def process_all_tech_db(filepath: str):
     process_gsm_data(filepath)
     process_wcdma_data(filepath)
     process_lte_data(filepath)
+    trx_with_bts_name(filepath)
 
     UtilsVars.final_all_database = convert_dfs(
-        UtilsVars.all_db_dfs, ["GSM", "WCDMA", "LTE_FDD", "LTE_TDD"]
+        UtilsVars.all_db_dfs, ["GSM", "WCDMA", "LTE_FDD", "LTE_TDD", "TRX"]
     )
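The ordering matters here: trx_with_bts_name appends its dataframe to UtilsVars.all_db_dfs, and convert_dfs pairs that list positionally with the sheet-name list, so "TRX" becomes the fifth sheet of the combined workbook. For illustration, a minimal sketch of what convert_dfs is assumed to do (the real implementation lives in utils/convert_to_excel.py and is not part of this diff):

    import io
    import pandas as pd

    def convert_dfs(dfs, sheet_names):
        # Assumed behavior: write each dataframe to its own sheet, paired by position.
        buffer = io.BytesIO()
        with pd.ExcelWriter(buffer, engine="xlsxwriter") as writer:  # any Excel engine would do
            for df, name in zip(dfs, sheet_names):
                df.to_excel(writer, sheet_name=name, index=False)
        buffer.seek(0)
        return buffer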
queries/process_gsm.py CHANGED
@@ -63,7 +63,7 @@ def process_gsm_data(file_path: str):
     # Read the specific sheet into a DataFrame
     dfs = pd.read_excel(
         file_path,
-        sheet_name=["BTS", "BCF", "TRX"],
+        sheet_name=["BTS", "BCF"],
         engine="calamine",
         skiprows=[0],
     )
@@ -112,6 +112,7 @@ def process_gsm_data(file_path: str):
     df_bcf.rename(columns={"name": "site_name"}, inplace=True)
     df_bcf = df_bcf[BCF_COLUMNS]
 
+    # Process TRX data
     df_trx = process_trx_data(file_path)
 
     # create band dataframe
queries/process_trx.py CHANGED
@@ -13,7 +13,52 @@ TRX_COLUMNS = [
 ]
 
 
-def process_trx_data(file_path: str):
+TRX_BTS_COLUMNS = [
+    "BSC",
+    "BCF",
+    "BTS",
+    "TRX",
+    "ID_BTS",
+    "number_trx_per_cell",
+    "number_trx_per_site",
+    "code",
+    "name",
+    "adminState",
+    "bbUnitSupportsEdge",
+    "channel0Maio",
+    "channel0Type",
+    "channel1Maio",
+    "channel1Type",
+    "channel2Maio",
+    "channel2Type",
+    "channel3Maio",
+    "channel3Type",
+    "channel4Maio",
+    "channel4Type",
+    "channel5Maio",
+    "channel5Type",
+    "channel6Maio",
+    "channel6Type",
+    "channel7Maio",
+    "channel7Type",
+    "initialFrequency",
+    "lapdLinkName",
+    "lapdLinkNumber",
+    "mcpaTrxNumber",
+    "mcpaTrxPortId",
+    "mcpaTrxPosition",
+    "numberOfTrxRfPowerLevels",
+    "optimumRxLevDL",
+    "optimumRxLevUL",
+    "preferredBcchMark",
+    "trxAbilities",
+    "trxFrequencyType",
+    "trxRfPower",
+    "tsc",
+]
+
+
+def process_brute_trx_data(file_path: str):
     """
     Process data from the specified file path.
 
@@ -23,7 +68,7 @@ def process_trx_data(file_path: str):
     # Read the specific sheet into a DataFrame
     dfs = pd.read_excel(
         file_path,
-        sheet_name=["BTS", "BCF", "TRX"],
+        sheet_name=["TRX"],
         engine="calamine",
         skiprows=[0],
     )
@@ -40,8 +85,15 @@ def process_trx_data(file_path: str):
         "count"
     )
 
-    bcch = df_trx[df_trx["channel0Type"] == 4]
-    tch = df_trx[df_trx["channel0Type"] == 3][["ID_BTS", "initialFrequency"]]
+    return df_trx
+
+
+def process_trx_data(file_path: str):
+
+    df_gsm_trx = process_brute_trx_data(file_path=file_path).copy()
+
+    bcch = df_gsm_trx[df_gsm_trx["channel0Type"] == 4]
+    tch = df_gsm_trx[df_gsm_trx["channel0Type"] == 3][["ID_BTS", "initialFrequency"]]
 
     tch = tch.pivot_table(
         index="ID_BTS",
@@ -54,19 +106,46 @@ def process_trx_data(file_path: str):
     # rename the columns
     tch.columns = ["ID_BTS", "TCH"]
 
-    # Merge dataframes
-
-    df_trx = pd.merge(bcch, tch, on="ID_BTS", how="left")
+    df_gsm_trx = pd.merge(bcch, tch, on="ID_BTS", how="left")
     # rename "initialFrequency" to "BCCH"
-    df_trx = df_trx.rename(columns={"initialFrequency": "BCCH"})
-    df_trx = df_trx[TRX_COLUMNS]
+    df_gsm_trx = df_gsm_trx.rename(columns={"initialFrequency": "BCCH"})
+    df_gsm_trx = df_gsm_trx[TRX_COLUMNS]
 
-    # Save dataframes
-    # save_dataframe(df_trx, "trx")
-    # df_2g2 = save_dataframe(df_2g, "2g")
+    return df_gsm_trx
 
-    # UtilsVars.final_gsm_database = convert_dfs([df_2g], ["GSM"])
-    return df_trx
 
+def trx_with_bts_name(file_path: str):
 
-# process_trx_data(r"data2\20240805_5810_05082024_Dump.xml.gz.xlsb")
+    df_gsm_trx = process_brute_trx_data(file_path=file_path).copy()
+    df_gsm_trx.drop(["name"], axis=1, inplace=True)
+
+    # Read the BTS sheet to recover the site code and name for each ID_BTS
+    dfs = pd.read_excel(
+        file_path,
+        sheet_name=["BTS"],
+        engine="calamine",
+        skiprows=[0],
+    )
+    df_bts = dfs["BTS"]
+    df_bts.columns = df_bts.columns.str.replace(r"[ ]", "", regex=True)
+    df_bts["code"] = df_bts["name"].str.split("_").str[0].astype(int)
+    df_bts["ID_BTS"] = df_bts[["BSC", "BCF", "BTS"]].astype(str).apply("_".join, axis=1)
+    df_bts = df_bts[["ID_BTS", "code", "name"]]
+
+    df_trx_bts_name = pd.merge(df_gsm_trx, df_bts, on="ID_BTS", how="left")
+    df_trx_bts_name = df_trx_bts_name[TRX_BTS_COLUMNS]
+
+    UtilsVars.all_db_dfs.append(df_trx_bts_name)
+
+    return df_trx_bts_name
+
+
+def process_trx_with_bts_name_data_to_excel(file_path: str):
+    """
+    Process data from the specified file path and save it to an Excel file.
+
+    Args:
+        file_path (str): The path to the file.
+    """
+    trx_bts_name = trx_with_bts_name(file_path)
+    UtilsVars.final_trx_database = convert_dfs([trx_bts_name], ["TRX"])
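Taken together, this refactor splits TRX handling into three steps: process_brute_trx_data reads the TRX sheet and adds the per-cell and per-site TRX counts, process_trx_data reduces that to BCCH/TCH per ID_BTS for the GSM database, and trx_with_bts_name joins the raw TRX rows with the site code and name from the BTS sheet for the standalone TRX database. A short standalone usage sketch based on the function names in this diff (the dump path is a placeholder):

    from queries.process_trx import (
        process_trx_data,
        process_trx_with_bts_name_data_to_excel,
        trx_with_bts_name,
    )
    from utils.utils_vars import UtilsVars

    dump_path = "dump.xlsb"  # placeholder: exported network dump workbook

    df_bcch_tch = process_trx_data(dump_path)    # BCCH/TCH per ID_BTS
    df_trx_named = trx_with_bts_name(dump_path)  # full TRX rows joined with site code/name
    process_trx_with_bts_name_data_to_excel(dump_path)
    excel_bytes = UtilsVars.final_trx_database   # in-memory workbook built by convert_dfs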
utils/check_sheet_exist.py CHANGED
@@ -6,6 +6,7 @@ class Technology:
     wcdma = False
     lte = False
     neighbors = False
+    trx = False
 
 
 # Dictionary of sheet groups to check
@@ -14,6 +15,7 @@ sheets_to_check = {
     "neighbors": ["ADCE", "ADJS", "ADJI", "ADJG", "ADJW", "BTS", "WCEL"],
     "wcdma": ["WCEL", "WBTS", "WNCEL"],
     "lte": ["LNBTS", "LNCEL", "LNCEL_FDD", "LNCEL_TDD"],
+    "trx": ["TRX", "BTS"],
 }
utils/utils_vars.py CHANGED
@@ -40,6 +40,7 @@ class UtilsVars:
     final_lte_database = ""
     final_gsm_database = ""
     final_wcdma_database = ""
+    final_trx_database = ""
     all_db_dfs = []
     final_all_database = ""
     neighbors_database = ""