Commit fab8800d authored by suryakant's avatar suryakant

ACP Custom Report Changes

parent e7346d40
...@@ -62,7 +62,10 @@ class ReportType: ...@@ -62,7 +62,10 @@ class ReportType:
""" """
REFINERY_REPORT = "refinery_report" REFINERY_REPORT = "refinery_report"
ACP_REPORT = "acp_report"
REFINERY_WORKSHEET_NAME = "REFINERY DAILY PERFORMANCE REPORT" REFINERY_WORKSHEET_NAME = "REFINERY DAILY PERFORMANCE REPORT"
ACP_WORKSHEET_NAME = "VEDANTA LIMITED \n ANODE CASTING PLANT (ACP) - " \
"SHIFT INCHARGE LOG SHEET"
class PostgresConstant: class PostgresConstant:
......
...@@ -3713,3 +3713,171 @@ class SterliteRefineryQuery: ...@@ -3713,3 +3713,171 @@ class SterliteRefineryQuery:
FROM PUBLIC.REFINERY_PRODUCTION_DAY_WISE RPDW FROM PUBLIC.REFINERY_PRODUCTION_DAY_WISE RPDW
WHERE DATE("Date") = '{day_start_date}' WHERE DATE("Date") = '{day_start_date}'
""" """
class ACPQuery:
    """
    ACP (Anode Casting Plant) custom report queries.

    Each nested class holds the SQL for one block of the ACP shift-incharge
    log sheet.  QUERY_1 returns the "on date" figure and QUERY_2 (where
    present) the month-to-date figure.  The {day_start_date},
    {day_end_date}, {month_start_date} and {month_end_date} placeholders are
    substituted via str.format by the report handler before execution.
    """

    class Particulars:
        """ PARTICULARS block: total anode production (MT). """
        # NOTE: FURNANCE is the column's actual spelling in the DB schema.
        QUERY_1 = """
            SELECT
                'anode_production' AS particulars,
                'MT' AS units,
                ROUND(SUM(ANODE_PRODUCTION_FURNANCE_1 + ANODE_PRODUCTION_FURNANCE_2)::NUMERIC,
                      2) AS on_date
            FROM SEMANTIC_PROD.ACP_DAILY_DATAENTRY
            WHERE DATE='{day_start_date}'
            GROUP BY 1
        """
        QUERY_2 = """
            SELECT
                'anode_production' AS particulars,
                'MT' AS units,
                ROUND(SUM(ANODE_PRODUCTION_FURNANCE_1 + ANODE_PRODUCTION_FURNANCE_2)::NUMERIC,
                      2) AS mtd
            FROM SEMANTIC_PROD.ACP_DAILY_DATAENTRY
            WHERE DATE BETWEEN '{month_start_date}' AND '{month_end_date}'
            GROUP BY 1
        """

    class FO:
        """ FO block: furnace-oil consumption per MT of anode produced. """
        QUERY_1 = """
            SELECT
                'fo_consumption_actual'::text AS particulars,
                'Lits/MT' AS units,
                ROUND((SUM(METRIC_VALUE) / SUM(ACTUAL_METRIC))::NUMERIC, 2) AS on_date
            FROM
                (SELECT SUM(ACP_DAILY_PRODUCTION_DATA_2.FO_CONSUMPTION_TOTAL_CONS) AS
                        METRIC_VALUE,
                        DATE(ACP_DAILY_PRODUCTION_DATA_2.DATE) AS date
                 FROM SEMANTIC_PROD.ACP_DAILY_PRODUCTION_DATA_2
                 GROUP BY 'fo_consumption_actual'::text,
                          (DATE(ACP_DAILY_PRODUCTION_DATA_2.DATE)))MAIN
            INNER JOIN
                (SELECT SUM(ANODE_PRODUCTION_FURNANCE_1 + ANODE_PRODUCTION_FURNANCE_2)
                        AS ACTUAL_METRIC, date
                 FROM SEMANTIC_PROD.ACP_DAILY_DATAENTRY
                 GROUP BY 2) ACP ON DATE(ACP.DATE) = DATE(MAIN.DATE)
            WHERE MAIN.DATE BETWEEN '{day_start_date}' AND '{day_end_date}'
            GROUP BY 1,2
        """
        QUERY_2 = """
            SELECT
                'fo_consumption_actual'::text AS particulars,
                'Lits/MT' AS units,
                ROUND((SUM(METRIC_VALUE) / SUM(ACTUAL_METRIC))::NUMERIC, 2) AS mtd
            FROM
                (SELECT SUM(ACP_DAILY_PRODUCTION_DATA_2.FO_CONSUMPTION_TOTAL_CONS) AS
                        METRIC_VALUE,
                        DATE(ACP_DAILY_PRODUCTION_DATA_2.DATE) AS date
                 FROM SEMANTIC_PROD.ACP_DAILY_PRODUCTION_DATA_2
                 GROUP BY 'fo_consumption_actual'::text,
                          (DATE(ACP_DAILY_PRODUCTION_DATA_2.DATE)))MAIN
            INNER JOIN
                (SELECT SUM(ANODE_PRODUCTION_FURNANCE_1 + ANODE_PRODUCTION_FURNANCE_2)
                        AS ACTUAL_METRIC, date
                 FROM SEMANTIC_PROD.ACP_DAILY_DATAENTRY
                 GROUP BY 2) ACP ON DATE(ACP.DATE) = DATE(MAIN.DATE)
            WHERE MAIN.DATE BETWEEN '{month_start_date}' AND '{month_end_date}'
            GROUP BY 1,2
        """

    class Rejection:
        """ REJECTION block: anode rejection as a percentage of production. """
        QUERY_1 = """
            SELECT KPI AS particulars,
                   "uom" AS units,
                   ROUND((CASE
                              WHEN KPI LIKE 'Anode Rejection Actual'
                                  THEN SUM(VALUE) / NULLIF(SUM(TOTAL), 0) * 100
                              ELSE NULL END)::NUMERIC,
                         2) on_date
            FROM (
                (SELECT KPI,
                        '%' AS UOM,
                        ACP_REJ + PROD_ACP AS TOTAL,
                        ACP_REJ AS VALUE, date
                 FROM SEMANTIC_PROD.ANODE_REJECTION_KPI_VIEW
                 WHERE KPI = 'Anode Rejection Actual')) MAIN
            WHERE DATE BETWEEN '{day_start_date}' AND '{day_end_date}'
            GROUP BY 1,2;
        """
        QUERY_2 = """
            SELECT KPI AS particulars,
                   "uom" AS units,
                   ROUND((CASE
                              WHEN KPI LIKE 'Anode Rejection Actual'
                                  THEN SUM(VALUE) / NULLIF(SUM(TOTAL), 0) * 100
                              ELSE NULL END)::NUMERIC,
                         2) mtd
            FROM (
                (SELECT KPI,
                        '%' AS UOM,
                        ACP_REJ + PROD_ACP AS TOTAL,
                        ACP_REJ AS VALUE, date
                 FROM SEMANTIC_PROD.ANODE_REJECTION_KPI_VIEW
                 WHERE KPI = 'Anode Rejection Actual')) MAIN
            WHERE DATE BETWEEN '{month_start_date}' AND '{month_end_date}'
            GROUP BY 1,2;
        """

    class PersonsOnDuty:
        """ PERSONS ON DUTY block: manpower per shift for the day. """
        QUERY_1 = """
            SELECT
                PERSONS_ON_DUTY,
                A_SHIFT,
                B_SHIFT,
                C_SHIFT
            FROM SEMANTIC_PROD.SHIFT_MANPOWER_ACP
            WHERE DATE='{day_start_date}'
        """

    class AnodeLugThickness:
        """ ANODE LUG THICKNESS block: lug/body thickness CTQ readings. """
        QUERY_1 = """
            SELECT
                ANODE_DIMENSION_LUG_THICKNESS AS anode_lug_thickness,
                ANODE_DIMENSION_BODY_THICKNESS AS anode_body_thickness
            FROM SEMANTIC_PROD.ACP_CTQ
            WHERE DATE = '{day_start_date}'
        """

    class AShiftCommunication:
        """ A-SHIFT COMMUNICATION block: shift-A activity tracker entries. """
        QUERY_1 = """
            SELECT
                SI_NUMBER AS s_no,
                ACP_DESCRIPTION AS a_shift_comm
            FROM SEMANTIC_PROD.ACP_SHIFT_ACTIVITY_TRACKER_1
            WHERE
                SHIFT = 'Shift A'
                AND DATE = '{day_start_date}'
        """

    class ShiftwiseData:
        """ SHIFTWISE DATA block: shift-A anode mould weights. """
        QUERY_1 = """
            SELECT
                ANODE_POSITION_A AS mould,
                SET_ANODE_WEIGHT_A AS set_value,
                WEIGH_BRIDGE_A AS field_wt
            FROM SEMANTIC_PROD.ACP_ANODE_WEIGHT
            WHERE LOWER(SHIFT) = 'shift a'
                AND DATE(date) = '{day_start_date}'
        """
\ No newline at end of file
...@@ -3,7 +3,8 @@ import copy ...@@ -3,7 +3,8 @@ import copy
import pandas as pd import pandas as pd
from datetime import datetime from datetime import datetime
from scripts.constants import ReportType, CommonConstants, PostgresConstant from scripts.constants import ReportType, CommonConstants, PostgresConstant
from scripts.template.sterlite_report_template import SterliteRefineryTemplate from scripts.template.refinery_report_template import SterliteRefineryTemplate
from scripts.template.acp_report_template import ACPReportTemplate
from scripts.core.logging.application_logging import logger from scripts.core.logging.application_logging import logger
from scripts.core.exception.app_exceptions import GeneralException from scripts.core.exception.app_exceptions import GeneralException
from scripts.core.utilities.postgresql_db_utils import PostgresDBUtility from scripts.core.utilities.postgresql_db_utils import PostgresDBUtility
...@@ -160,12 +161,9 @@ class CustomReportHandler: ...@@ -160,12 +161,9 @@ class CustomReportHandler:
def write_dataframe_to_excel( def write_dataframe_to_excel(
self, self,
input_json, input_json, writer, workbook, sheet_name, start_col,
writer, start_row, header_merge_format, column_merge_format,
workbook, blank_merge_format
sheet_name,
start_col,
start_row
): ):
""" """
:param input_json: :param input_json:
...@@ -174,6 +172,9 @@ class CustomReportHandler: ...@@ -174,6 +172,9 @@ class CustomReportHandler:
:param sheet_name: :param sheet_name:
:param start_col: :param start_col:
:param start_row: :param start_row:
:param header_merge_format:
:param column_merge_format:
:param blank_merge_format:
:return: :return:
""" """
dataframes_list = [] dataframes_list = []
...@@ -227,23 +228,19 @@ class CustomReportHandler: ...@@ -227,23 +228,19 @@ class CustomReportHandler:
for merge_index, border_value in border_json.items(): for merge_index, border_value in border_json.items():
# (Start Rows, Start Column, End Row, End Column, Title, Format) # (Start Rows, Start Column, End Row, End Column, Title, Format)
worksheet.merge_range(*merge_index, border_value, worksheet.merge_range(*merge_index, border_value,
workbook.add_format( workbook.add_format(header_merge_format))
SterliteRefineryTemplate.COLUMN_HEADER_FORMAT)
)
# Write the column headers with the defined format. # Write the column headers with the defined format.
for col_index, value in enumerate(result_df.columns.values): for col_index, value in enumerate(result_df.columns.values):
if value: if value:
worksheet.write( worksheet.write(
start_row, col_index, value, start_row, col_index, value,
workbook.add_format( workbook.add_format(column_merge_format)
SterliteRefineryTemplate.COLUMN_HEADER_FORMAT)
) )
else: else:
worksheet.write( worksheet.write(
start_row, col_index, value, start_row, col_index, value,
workbook.add_format( workbook.add_format(blank_merge_format)
SterliteRefineryTemplate.BLANK_COLUMN_HEADER_FORMAT)
) )
logger.info(f"Shape of current data frame is {result_df.shape}") logger.info(f"Shape of current data frame is {result_df.shape}")
...@@ -258,9 +255,10 @@ class CustomReportHandler: ...@@ -258,9 +255,10 @@ class CustomReportHandler:
message = "Error generating a message" message = "Error generating a message"
data = input_json["file_name"] data = input_json["file_name"]
try: try:
logger.info(f"Report: {str(input_json['job_type']).lower()}")
# if str(input_json.job_type).lower() == ReportType.REFINERY_REPORT: # if str(input_json.job_type).lower() == ReportType.REFINERY_REPORT:
if str(input_json["job_type"]).lower() == ReportType.REFINERY_REPORT: if str(input_json["job_type"]).lower() == ReportType.REFINERY_REPORT:
logger.info("Generating custom date filter with in the range") logger.info("Generating custom date filter with in the range")
# Getting custom date range using start date and end date # Getting custom date range using start date and end date
...@@ -304,6 +302,9 @@ class CustomReportHandler: ...@@ -304,6 +302,9 @@ class CustomReportHandler:
sheet_name=sheet_name, sheet_name=sheet_name,
start_col=start_col, start_col=start_col,
start_row=start_row, start_row=start_row,
header_merge_format=SterliteRefineryTemplate.COLUMN_HEADER_FORMAT,
column_merge_format=SterliteRefineryTemplate.COLUMN_HEADER_FORMAT,
blank_merge_format=SterliteRefineryTemplate.BLANK_COLUMN_HEADER_FORMAT
) )
if total_column < shape[1]: if total_column < shape[1]:
...@@ -334,6 +335,84 @@ class CustomReportHandler: ...@@ -334,6 +335,84 @@ class CustomReportHandler:
workbook.formats[0].set_align('center') workbook.formats[0].set_align('center')
logger.info(f"Report completed for: {each_date_range}") logger.info(f"Report completed for: {each_date_range}")
            # ACP report branch: builds one worksheet per date in the
            # requested range, laid out from ACPReportTemplate and styled
            # with its xlsxwriter format dicts.
            if str(input_json["job_type"]).lower() == ReportType.ACP_REPORT:
                logger.info("Generating custom date filter with in the range")
                # Getting custom date range using start date and end date
                date_filter = self.create_custom_date_filter(input_json=input_json)
                with pd.ExcelWriter(
                        input_json["file_name"],
                        engine="xlsxwriter") as writer:
                    for each_date_range in date_filter:
                        logger.info(f"date filter: {each_date_range}")
                        # Deep copy so mutations made while filling one
                        # date's data do not leak into the next date.
                        report_template = copy.deepcopy(
                            ACPReportTemplate.REPORT_TEMPLATE
                        )
                        start_col = CommonConstants.START_COLUMN
                        start_row = CommonConstants.START_ROW
                        # Widest block seen so far; used to size the title merge.
                        total_column = 0
                        workbook = writer.book
                        # Sheet named after the day, e.g. "01 Jan 2024".
                        sheet_name = datetime.strptime(
                            each_date_range[CommonConstants.DAY_START_DATE],
                            "%Y-%m-%d").strftime("%d %b %Y")
                        worksheet = None
                        # Iterating over each block of the ACP report template
                        for each_blocks in report_template:
                            logger.info("Fetching each KPI data from queries")
                            each_blocks = self.get_queries_from_db(
                                input_json=each_blocks, date_filter=each_date_range
                            )
                            logger.info("Writing each block into excel sheet")
                            shape, worksheet, header_flag = \
                                self.write_dataframe_to_excel(
                                    input_json=each_blocks,
                                    writer=writer,
                                    workbook=workbook,
                                    sheet_name=sheet_name,
                                    start_col=start_col,
                                    start_row=start_row,
                                    header_merge_format=ACPReportTemplate.COLUMN_HEADER_FORMAT,
                                    column_merge_format=ACPReportTemplate.COLUMN_HEADER_FORMAT,
                                    blank_merge_format=ACPReportTemplate.BLANK_COLUMN_HEADER_FORMAT
                                )
                            if total_column < shape[1]:
                                total_column = shape[1]
                            # Advance past this block plus a two-row gap.
                            start_row += shape[0] + 2
                            if header_flag:
                                start_row += 1
                        # Add a header format.
                        main_header_format = workbook.add_format(
                            ACPReportTemplate.WORKSHEET_HEADER_FORMAT)
                        logger.info("Creating Header for each sheet")
                        # (Merge Rows, Start Column, '', Total Column, Title, Format)
                        worksheet.merge_range(1, 0, 0, 0, sheet_name,
                                              main_header_format)
                        worksheet.merge_range(1, 1, 0, total_column - 1,
                                              ReportType.ACP_WORKSHEET_NAME,
                                              main_header_format)
                        # Setting width to the column
                        worksheet.set_column(0, total_column,
                                             CommonConstants.OVERALL_COLUMN_WIDTH)
                        # Center alignment of Excel data
                        workbook.formats[0].set_align('center')
                        logger.info(f"Report completed for: {each_date_range}")
except GeneralException as err: except GeneralException as err:
logger.error(f"Exception in custom_report_handler: {err}") logger.error(f"Exception in custom_report_handler: {err}")
return status, message, data return status, message, data
from scripts.constants import PostgresConstant
from scripts.core.db.postgres.custom_report_query import ACPQuery
class ACPReportTemplate:
    """
    Excel layout template for the ACP shift-incharge log sheet report.

    The *_FORMAT dicts are passed to xlsxwriter's ``workbook.add_format``;
    REPORT_TEMPLATE lists, in writing order, the blocks rendered on each
    sheet (one dict per vertical section of the sheet).
    """

    # Merged title row at the top of every worksheet.
    WORKSHEET_HEADER_FORMAT = {
        "bold": True,
        "align": "center",
        # xlsxwriter's vertical-centre value is 'vcenter'; 'center' is a
        # horizontal alignment and leaves the cell bottom-aligned.
        "valign": "vcenter",
        "fg_color": "#e6e7eb",
        "font_color": "#1b314f",
        "border": 1,
        "font_size": "20",
    }
    # Format for block/column header cells.
    COLUMN_HEADER_FORMAT = {
        "bold": True,
        "align": "center",
        "valign": "vcenter",
        "fg_color": "#e6e7eb",
        "font_color": "#021b5e",
        "border": 1,
    }
    # Format for intentionally empty header cells (no fill, no border).
    BLANK_COLUMN_HEADER_FORMAT = {
        "bold": True,
        "align": "center",
        "valign": "vcenter",
    }
    # Each entry: display columns, the queries that feed them (QUERY_1 =
    # on-date, QUERY_2 = month-to-date), and the result-set column names
    # ("data_column") mapped onto the display columns.
    REPORT_TEMPLATE = [
        {
            "BLANK": {
                "columns": [None],
                "data": [],
                "query": {},
                "data_column": [None]
            },
        },
        {
            "PARTICULARS": {
                "columns": [
                    "PARTICULARS", "UNITS", "ON DATE", "MTD"
                ],
                "query": {
                    "Production": [
                        ACPQuery.Particulars.QUERY_1,
                        ACPQuery.Particulars.QUERY_2,
                    ],
                    "FO": [
                        ACPQuery.FO.QUERY_1,
                        ACPQuery.FO.QUERY_2,
                    ],
                    # PNG/LPG and Power rows have no data source yet and
                    # render empty.
                    "PNG/LPG": [
                    ],
                    "Rejection": [
                        ACPQuery.Rejection.QUERY_1,
                        ACPQuery.Rejection.QUERY_2,
                    ],
                    "Power": [
                    ]
                },
                "data": [],
                "data_column": [
                    "particulars", "units", "on_date", "mtd"
                ],
            },
            "BLANK": {
                "columns": [None],
                "data": [],
                "query": {},
                "data_column": [None]
            },
            "PERSONS ON DUTY": {
                "columns": [
                    "PERSONS ON DUTY", "A", "B", "C"
                ],
                "query": {
                    "PersonsOnDuty": [
                        ACPQuery.PersonsOnDuty.QUERY_1,
                    ]
                },
                "data": [],
                "data_column": [
                    "persons_on_duty", "a", "b", "c"
                ],
            },
            "BLANK1": {
                "columns": [None],
                "data": [],
                "query": {},
                "data_column": [None]
            },
            "ANODE LUG THICKNESS": {
                "columns": [
                    "ANODE LUG THICKNESS (22-32 mm)", "ANODE BODY THICKNESS (31-41 mm)"
                ],
                "query": {
                    "AnodeLugThickness": [
                        ACPQuery.AnodeLugThickness.QUERY_1,
                    ]
                },
                "data": [],
                "data_column": [
                    "anode_lug_thickness", "anode_body_thickness"
                ],
            },
        },
        {
            "A-SHIFT COMMUNICATION": {
                "columns": [
                    "S.No", "A-SHIFT COMMUNICATION"
                ],
                "query": {
                    "AShiftCommunication": [
                        ACPQuery.AShiftCommunication.QUERY_1,
                    ],
                },
                "data": [],
                "data_column": [
                    "s_no", "a_shift_comm"
                ],
            },
            "SHIFTWISE DATA": {
                "columns": [
                    "MOULD", "SET VALUE", "FIELD WT"
                ],
                "query": {
                    # Bug fix: previously wired to PersonsOnDuty.QUERY_1,
                    # which returns shift manpower, not mould weights.
                    "ShiftwiseData": [
                        ACPQuery.ShiftwiseData.QUERY_1,
                    ]
                },
                "data": [],
                "data_column": [
                    "mould", "set_value", "field_wt"
                ],
                "addition": {
                    "merge_header": "SHIFTWISE DATA"
                }
            }
        }
    ]
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment