Commit 2cc918d3 authored by suryakant

Sterlite Custom Report Updates

- Round the HSE SUM(METRIC_VALUE) aggregates to two decimals (ON_DATE, MTD, YTD)
- Fix the append_flag check so blocks with no query data still get a null row
- Write each block into a shared Excel sheet via pd.ExcelWriter, tracking start rows
- Add the DEPOSIT LOSS (-) / GAIN (+) DETAILS template block; drop the duplicate ON DATE column from the filter plant block

parent 1b8ff03e
@@ -51,6 +51,8 @@ class CommonConstants:
     START_DATE = "start_date"
     END_DATE = "end_date"
     FREQUENCY = 'D'
+    START_COLUMN = 0
+    START_ROW = 2

 class ReportType:
@@ -298,7 +298,7 @@ class SterliteRefineryQuery:
             'No.' AS UOM,
             0 AS NORMS,
-            SUM(METRIC_VALUE), 2 AS ON_DATE
+            ROUND(SUM(METRIC_VALUE), 2) AS ON_DATE
         FROM SEMANTIC_PROD.REFINERY_HSE_VIEW
         WHERE LOWER(TRIM(KPI)) in ('first aid')
             AND DATE BETWEEN '{day_start_date}' AND '{day_end_date}'
@@ -314,7 +314,7 @@ class SterliteRefineryQuery:
             'No.' AS UOM,
             0 AS NORMS,
-            SUM(METRIC_VALUE), 2 AS MTD
+            ROUND(SUM(METRIC_VALUE), 2) AS MTD
         FROM SEMANTIC_PROD.REFINERY_HSE_VIEW
         WHERE LOWER(TRIM(KPI)) in ('first aid')
             AND DATE BETWEEN '{month_start_date}' AND '{month_end_date}'
@@ -330,7 +330,7 @@ class SterliteRefineryQuery:
             'No.' AS UOM,
             0 AS NORMS,
-            SUM(METRIC_VALUE), 2 AS YTD
+            ROUND(SUM(METRIC_VALUE), 2) AS YTD
         FROM SEMANTIC_PROD.REFINERY_HSE_VIEW
         WHERE LOWER(TRIM(KPI)) in ('first aid')
             AND DATE BETWEEN '{year_start_date}' AND '{year_end_date}'
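The old projection `SUM(METRIC_VALUE), 2 AS ON_DATE` is valid SQL but selects two columns: the unrounded sum plus a literal `2` aliased as `ON_DATE`, which shifts every downstream column mapping. A minimal sketch of the fix, using an in-memory SQLite table as a hypothetical stand-in for SEMANTIC_PROD.REFINERY_HSE_VIEW:

```python
import sqlite3

# Hypothetical sample data standing in for the refinery HSE view.
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE refinery_hse_view (kpi TEXT, metric_value REAL)")
conn.executemany(
    "INSERT INTO refinery_hse_view VALUES (?, ?)",
    [("First Aid ", 1.004), ("first aid", 2.129)],
)

# Broken: two columns come back, and ON_DATE is always the literal 2.
broken = conn.execute(
    "SELECT SUM(metric_value), 2 AS ON_DATE FROM refinery_hse_view "
    "WHERE LOWER(TRIM(kpi)) IN ('first aid')"
).fetchone()

# Fixed: one column, the sum rounded to two decimals.
fixed = conn.execute(
    "SELECT ROUND(SUM(metric_value), 2) AS ON_DATE FROM refinery_hse_view "
    "WHERE LOWER(TRIM(kpi)) IN ('first aid')"
).fetchone()

print(broken)  # (3.133, 2)
print(fixed)   # (3.13,)
```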
@@ -70,6 +70,8 @@ class CustomReportHandler:
         if input_json[each_blocks][CommonConstants.QUERY]:
             for each_kpi in input_json[each_blocks][CommonConstants.QUERY]:
+                logger.info(f"KPI: {each_kpi}")
                 temp_data_dict = dict()
                 append_flag = False
@@ -93,6 +95,7 @@ class CustomReportHandler:
                             CommonConstants.YEAR_END_DATE])
                 response = self.postgres_db_obj.fetch_data(query=query)
+                logger.info(f"Response length: {len(response)}")
                 if response:
                     if len(response) <= 1:
                         temp_data_dict.update(dict(response[0]))
@@ -102,11 +105,12 @@ class CustomReportHandler:
                             input_json[each_blocks][CommonConstants.DATA]. \
                                 append(dict(every_data))
-                if not temp_data_dict and append_flag:
+                if not temp_data_dict and not append_flag:
                     # Creating null values if no data
                     for each_columns in input_json[each_blocks][
                             CommonConstants.DATA_COLUMN]:
                         temp_data_dict.update({each_columns: None})
+                    append_flag = True
                 if append_flag:
                     input_json[each_blocks][CommonConstants.DATA].append(
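The original condition `if not temp_data_dict and append_flag:` could never fire, since `append_flag` starts as False, so blocks whose queries returned nothing produced no placeholder row. A minimal sketch of the corrected flow, with a hypothetical block dict:

```python
# Hypothetical block mimicking the template structure.
block = {"data_column": ["category", "on_date", "mtd"], "data": []}

temp_data_dict = {}   # would normally be filled from the DB response
append_flag = False   # becomes True once a row is staged

# Fixed check: build a null row only when nothing was fetched AND no row
# has been staged yet (append_flag starts False, so this branch can run).
if not temp_data_dict and not append_flag:
    for column in block["data_column"]:
        temp_data_dict[column] = None
    append_flag = True

if append_flag:
    block["data"].append(temp_data_dict)

print(block["data"])  # [{'category': None, 'on_date': None, 'mtd': None}]
```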
@@ -119,34 +123,37 @@ class CustomReportHandler:
                             {each_columns: None}
                         )
                     input_json[each_blocks][CommonConstants.DATA].append(temp_data_dict)
+            logger.info(f"Block: {input_json[each_blocks]}")
         return input_json

-    def write_dataframe_to_excel(self, input_json, sheet_name):
+    def write_dataframe_to_excel(
+            self, input_json, writer, sheet_name, start_col, start_row
+    ):
         """
         :param input_json:
+        :param writer:
         :param sheet_name:
+        :param start_col:
+        :param start_row:
         :return:
         """
-        dataframe_list = []
-        print(input_json)
-        print(sheet_name)
-        print(len(input_json))
-        if len(input_json) >= 2:
-            print("2 block")
-            # concatenated_df = pd.concat(
-            #     [dataframes_to_concat[i], dataframes_to_concat[i + 1]],
-            for each_blocks in input_json:
-                print(input_json[each_blocks])
-        else:
-            print("1 block")
-        print()
-        # for each_blocks in input_json:
-        #     print(each_blocks)
-        #     dataframe = pd.DataFrame(data=input_json[each_blocks]["data"])
-        #     dataframe.columns = input_json[each_blocks]["columns"]
-        #     dataframe_list.append(dataframe)
+        dataframes_list = []
+        # Iterate through the categories and build one DataFrame per block
+        for category, category_data in input_json.items():
+            data = category_data.get('data', [])
+            data_frame = pd.DataFrame(data)
+            data_frame.columns = category_data.get('columns', [])
+            dataframes_list.append(data_frame)
+        # Concatenate all DataFrames horizontally (side by side, along columns)
+        result_df = pd.concat(dataframes_list, axis=1, ignore_index=False)
+        result_df.to_excel(
+            writer, sheet_name=sheet_name,
+            startcol=start_col, startrow=start_row,
+            header=True, index=False
+        )
+        return result_df.shape[0]
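A self-contained sketch of what the rewritten writer does, using hypothetical block data: `pd.concat(..., axis=1)` places the blocks side by side, and the returned row count lets the caller advance its write cursor:

```python
import pandas as pd

# Hypothetical input mirroring the template structure: two blocks that
# share a sheet, each with display columns and fetched rows.
input_json = {
    "SAFETY": {
        "columns": ["CATEGORY", "ON DATE", "MTD"],
        "data": [{"category": "First Aid", "on_date": 1.0, "mtd": 3.13}],
    },
    "DEPOSIT LOSS (-) / GAIN (+) DETAILS": {
        "columns": ["CATEGORY", "ON DATE", "TILL DATE"],
        "data": [{"category": None, "on_date": None, "till_date": None}],
    },
}

frames = []
for category_data in input_json.values():
    frame = pd.DataFrame(category_data["data"])
    frame.columns = category_data["columns"]  # positional relabel to display headers
    frames.append(frame)

# axis=1 joins the blocks column-wise, so they sit side by side in the sheet.
result_df = pd.concat(frames, axis=1)

with pd.ExcelWriter("example.xlsx", engine="openpyxl") as writer:
    result_df.to_excel(writer, sheet_name="2023-09-01",
                       startcol=0, startrow=2, header=True, index=False)

print(result_df.shape[0])  # 1 data row; the caller uses this to advance start_row
```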
     def custom_report_handler(self, input_json):
         """
@@ -163,31 +170,39 @@ class CustomReportHandler:
             # Getting custom date range using start date and end date
             date_filter = self.create_custom_date_filter(input_json=input_json)
-            # with pd.ExcelWriter(path="testing.xlsx", engine="openpyxl") as excel:
-            #     pass
-            for each_date_range in date_filter:
-                logger.info(f"date filter: {each_date_range}")
-                report_template = copy.deepcopy(
-                    SterliteRefineryTemplate.REPORT_TEMPLATE
-                )
-                # Iterating over sterlite json file
-                for each_blocks in report_template:
-                    # Getting the data from queries
-                    each_blocks = self.get_queries_from_db(
-                        input_json=each_blocks, date_filter=each_date_range
-                    )
-                    print("=" * 60)
-                    print(each_blocks)
-                    self.write_dataframe_to_excel(
-                        input_json=each_blocks,
-                        sheet_name=each_date_range[CommonConstants.DAY_START_DATE]
-                    )
-                    break
-                print("@" * 100)
-                print(report_template)
-                break
+            with pd.ExcelWriter("output.xlsx", engine="openpyxl") as writer:
+                for each_date_range in date_filter:
+                    logger.info(f"date filter: {each_date_range}")
+                    report_template = copy.deepcopy(
+                        SterliteRefineryTemplate.REPORT_TEMPLATE
+                    )
+                    start_col = CommonConstants.START_COLUMN
+                    start_row = CommonConstants.START_ROW
+                    # Iterating over sterlite json file
+                    for each_blocks in report_template:
+                        logger.info("Getting the data from queries")
+                        each_blocks = self.get_queries_from_db(
+                            input_json=each_blocks, date_filter=each_date_range
+                        )
+                        logger.info(f"start_row: {start_row}")
+                        logger.info("Writing each block into excel sheet")
+                        last_row_shape = self.write_dataframe_to_excel(
+                            input_json=each_blocks,
+                            writer=writer,
+                            sheet_name=each_date_range[
+                                CommonConstants.DAY_START_DATE],
+                            start_col=start_col,
+                            start_row=start_row,
+                        )
+                        logger.info(f"Last row number: {last_row_shape}")
+                        start_row += last_row_shape + 2
+                        logger.info(f"end_row: {start_row}")
         except GeneralException as err:
             logger.error(f"Exception in custom_report_handler: {err}")
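The handler now stacks all blocks for one date range down a single sheet, advancing the write cursor by the previous block's row count plus two, which skips that block's header row and leaves one blank spacer row. A standalone sketch of that cursor arithmetic, with hypothetical block frames:

```python
import pandas as pd

START_COLUMN, START_ROW = 0, 2  # mirrors CommonConstants

# Hypothetical per-block frames for one date range.
blocks = [
    pd.DataFrame({"CATEGORY": ["First Aid"], "ON DATE": [1], "MTD": [3]}),
    pd.DataFrame({"CATEGORY": ["Crop A", "Crop B"],
                  "ON DATE": [5, 7], "MTD": [9, 11]}),
]

with pd.ExcelWriter("output.xlsx", engine="openpyxl") as writer:
    start_row = START_ROW
    for block_df in blocks:
        # Repeated to_excel calls on the same writer reuse the sheet,
        # each writing at its own start row.
        block_df.to_excel(writer, sheet_name="2023-09-01",
                          startcol=START_COLUMN, startrow=start_row,
                          header=True, index=False)
        # Advance past this block's data rows, its header, and a blank spacer.
        start_row += block_df.shape[0] + 2
```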
@@ -232,6 +232,21 @@ class SterliteRefineryTemplate:
             "data_column": ["crop", "banks", "total_cells",
                             "cell_voltage", "t_shorts"],
             "format": "",
         },
+        "DEPOSIT LOSS (-) / GAIN (+) DETAILS": {
+            "columns": [
+                "CATEGORY",
+                "ON DATE",
+                "TILL DATE"
+            ],
+            "query": [],
+            "data": [],
+            "data_column": [
+                "category",
+                "on_date",
+                "till_date"
+            ],
+            "format": "",
+        }
     },
     {
@@ -265,13 +280,12 @@ class SterliteRefineryTemplate:
                 "MTD",
                 "Filtered Volume",
                 "Total Volume Refinery",
-                "ON DATE",
             ],
             "query": [],
             "data": [],
             "data_column": [
                 "filter_availability", "on_date", "mtd", "filtered_volume",
-                "total_volume_refinery", "on_date"],
+                "total_volume_refinery"],
             "format": "",
         }
     },
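Each template block pairs display `columns` with the query result's `data_column` keys positionally, which is why the duplicated `on_date` entry in the filter plant block had to go: it produced a sixth data column with only five headers. A sketch of how the new DEPOSIT LOSS block flows through that mapping when its queries return nothing (data values hypothetical):

```python
import pandas as pd

# New template block, as added in this commit.
block = {
    "columns": ["CATEGORY", "ON DATE", "TILL DATE"],
    "query": [],   # per-KPI SQL would be listed here
    "data": [],
    "data_column": ["category", "on_date", "till_date"],
    "format": "",
}

# With no query data fetched, the handler stages one null row per data_column.
block["data"].append({column: None for column in block["data_column"]})

# Positional relabel from data_column keys to display headers.
frame = pd.DataFrame(block["data"])
frame.columns = block["columns"]
print(frame)
#   CATEGORY ON DATE TILL DATE
# 0     None    None      None
```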