Commit 975dc483 authored by suryakant's avatar suryakant

ACP Custom Report Changes

parent 2b09445e
......@@ -60,6 +60,7 @@ class PostgresDetails(BaseSettings):
port: str
username: str
password: str
database: str = ""
class Config:
env_prefix = "POSTGRES_"
......
......@@ -53,7 +53,8 @@ class CommonConstants:
FREQUENCY = 'D'
START_COLUMN = 0
START_ROW = 2
OVERALL_COLUMN_WIDTH = 27
REFINERY_COLUMN_WIDTH = 27
ACP_COLUMN_WIDTH = 24
class ReportType:
......@@ -64,8 +65,9 @@ class ReportType:
REFINERY_REPORT = "refinery_report"
ACP_REPORT = "acp_report"
REFINERY_WORKSHEET_NAME = "REFINERY DAILY PERFORMANCE REPORT"
ACP_WORKSHEET_NAME = "VEDANTA LIMITED \n ANODE CASTING PLANT (ACP) - " \
"SHIFT INCHARGE LOG SHEET"
ACP_WORKSHEET_NAME_1 = "VEDANTA LIMITED"
ACP_WORKSHEET_NAME_2 = "ANODE CASTING PLANT (ACP) - SHIFT INCHARGE LOG SHEET"
DATE = "DATE"
class PostgresConstant:
......
......@@ -3835,10 +3835,10 @@ class ACPQuery:
""" Persons on duty """
QUERY_1 = """
SELECT
PERSONS_ON_DUTY,
A_SHIFT,
B_SHIFT,
C_SHIFT
PERSONS_ON_DUTY AS persons_on_duty,
A_SHIFT as a,
B_SHIFT as b,
C_SHIFT as c
FROM SEMANTIC_PROD.SHIFT_MANPOWER_ACP
WHERE DATE='{day_start_date}'
"""
......@@ -3990,8 +3990,14 @@ class ACPQuery:
class MeltTemperature:
    """Melt-temperature row for the PARAMETERS report block.

    Produces a single labelled row: parameter name, expected range
    ('1120-1140 DEG C'), and the summed melt temperature for the
    requested day.
    """
    # Diff-residue fix: a leftover `AND DATE(date) = ...` line sat inside
    # the SELECT list before FROM, making the statement invalid SQL.
    # The WHERE clause below already restricts to the report day.
    # NOTE(review): "range" can be a reserved word in some SQL dialects —
    # confirm the target database accepts it unquoted as an alias.
    QUERY_1 = """
        SELECT
            'MELT TEMPERATURE' AS parameters,
            '1120-1140 DEG C' AS range,
            SUM(COALESCE(MELT_TEMP, 0)) AS value
        FROM SEMANTIC_PROD.FURNACE_1_CTP
        WHERE DATE = '{day_start_date}'
        GROUP BY 1;
    """
class MouldTemperature:
......@@ -4118,3 +4124,45 @@ class ACPQuery:
WHERE LOWER(SUBSTRING(ANODE_SHIFT_SELECTION,1,1)) = 'c'
AND DATE(date) = '{day_start_date}'
"""
class FurnaceShiftA:
    """ FURNACE SHIFT A """
    # Burner-cleaning record for shift A: furnace identifier plus the
    # three burner columns, restricted to the requested report day.
    # NOTE(review): LOWER(SHIFT) = 'shift a' assumes values like
    # 'Shift A' / 'SHIFT A' in the table — confirm against the data.
    QUERY_1 = """
        SELECT
            SHIFTA_FURNACE AS furnace,
            BURNER_1 as burner_1,
            BURNER_2 as burner_2,
            BURNER_3 as burner_3
        FROM SEMANTIC_PROD.ACP_BURNER_CLEANING
        WHERE LOWER(SHIFT) = 'shift a'
        AND DATE(date) = '{day_start_date}'
    """
class FurnaceShiftB:
    """ FURNACE SHIFT B """
    # Burner-cleaning record for shift B; same output columns as shift A
    # (furnace, burner_1..burner_3) but sourced from the *_B columns.
    QUERY_1 = """
        SELECT
            SHIFTB_FURNACE AS furnace,
            BURNER1_B AS burner_1,
            BURNER2_B AS burner_2,
            BURNER3_B AS burner_3
        FROM SEMANTIC_PROD.ACP_BURNER_CLEANING
        WHERE LOWER(SHIFT) = 'shift b'
        AND DATE(date) = '{day_start_date}'
    """
class FurnaceShiftC:
    """ FURNACE SHIFT C """
    # Burner-cleaning record for shift C; same output columns as shifts
    # A/B (furnace, burner_1..burner_3) but sourced from the *_C columns.
    QUERY_1 = """
        SELECT
            SHIFTC_FURNACE AS furnace,
            BURNER1_C AS burner_1,
            BURNER2_C AS burner_2,
            BURNER3_C AS burner_3
        FROM SEMANTIC_PROD.ACP_BURNER_CLEANING
        WHERE LOWER(SHIFT) = 'shift c'
        AND DATE(date) = '{day_start_date}'
    """
......@@ -20,45 +20,57 @@ class CustomReportHandler:
:param input_json:
:return:
"""
flag = False
date_range_list = []
# Fetching Start date
start_date = datetime.strptime(
input_json[CommonConstants.PROPERTY][CommonConstants.START_DATE],
CommonConstants.DATE_TIME_FORMAT
)
# Fetching End date
end_date = datetime.strptime(
input_json[CommonConstants.PROPERTY][CommonConstants.END_DATE],
CommonConstants.DATE_TIME_FORMAT
)
logger.info(f"Creating list of dates starting from {start_date} to {end_date}")
# Generating a date range
date_list = pd.date_range(start_date, end_date, freq=CommonConstants.FREQUENCY)
# Iterating and creating where clause filters
for each_dates in date_list.strftime(
CommonConstants.DATE_TIME_FORMAT).to_list():
# To get the each_dates - financial year
financial_year = None
date_obj = datetime.strptime(each_dates, CommonConstants.DATE_TIME_FORMAT)
if date_obj.month >= 4:
financial_year = str(date_obj.year)
elif date_obj.month < 4:
financial_year = str(date_obj.year - 1)
date_range_list.append(
dict(
day_start_date=each_dates,
day_end_date=each_dates,
month_start_date=each_dates[:-2] + "01",
month_end_date=each_dates,
year_start_date=financial_year + "-04-01",
year_end_date=each_dates,
)
if CommonConstants.PROPERTY in input_json and CommonConstants.START_DATE \
in input_json[CommonConstants.PROPERTY] and CommonConstants.END_DATE \
in input_json[CommonConstants.PROPERTY]:
# Fetching Start date
start_date = datetime.strptime(
input_json[CommonConstants.PROPERTY][CommonConstants.START_DATE],
CommonConstants.DATE_TIME_FORMAT
)
# Fetching End date
end_date = datetime.strptime(
input_json[CommonConstants.PROPERTY][CommonConstants.END_DATE],
CommonConstants.DATE_TIME_FORMAT
)
return date_range_list
logger.info(f"Creating list of dates starting from {start_date} "
f"to {end_date}")
# Generating a date range
date_list = pd.date_range(start=start_date,
end=end_date,
freq=CommonConstants.FREQUENCY)
# Iterating and creating where clause filters
for each_dates in date_list.strftime(
CommonConstants.DATE_TIME_FORMAT).to_list():
# To get the each_dates - financial year
financial_year = None
date_obj = datetime.strptime(each_dates,
CommonConstants.DATE_TIME_FORMAT)
if date_obj.month >= 4:
financial_year = str(date_obj.year)
elif date_obj.month < 4:
financial_year = str(date_obj.year - 1)
date_range_list.append(
dict(
day_start_date=each_dates,
day_end_date=each_dates,
month_start_date=each_dates[:-2] + "01",
month_end_date=each_dates,
year_start_date=financial_year + "-04-01",
year_end_date=each_dates,
)
)
flag = True
return flag, date_range_list
def get_queries_from_db(self, input_json, date_filter):
"""
......@@ -251,168 +263,231 @@ class CustomReportHandler:
:param input_json:
:return:
"""
status = False
message = "Error generating a message"
data = input_json["file_name"]
response_json = dict(
job_id=input_json['job_id'],
file_path="",
file_name=input_json["file_name"],
is_schedule="",
project_id=input_json["project_id"]
)
try:
logger.info(f"Report: {str(input_json['job_type']).lower()}")
# if str(input_json.job_type).lower() == ReportType.REFINERY_REPORT:
if str(input_json["job_type"]).lower() == ReportType.REFINERY_REPORT:
logger.info("Generating custom date filter with in the range")
logger.info("Generating custom date filter with in the range")
# Getting custom date range using start date and end date
date_filter = self.create_custom_date_filter(input_json=input_json)
with pd.ExcelWriter(
input_json["file_name"],
engine="xlsxwriter") as writer:
for each_date_range in date_filter:
logger.info(f"date filter: {each_date_range}")
report_template = copy.deepcopy(
SterliteRefineryTemplate.REPORT_TEMPLATE
)
start_col = CommonConstants.START_COLUMN
start_row = CommonConstants.START_ROW
total_column = 0
workbook = writer.book
sheet_name = datetime.strptime(
each_date_range[CommonConstants.DAY_START_DATE],
"%Y-%m-%d").strftime("%d %b %Y")
worksheet = None
# Iterating over sterlite json file
for each_blocks in report_template:
logger.info("Fetching each KPI data from queries")
each_blocks = self.get_queries_from_db(
input_json=each_blocks, date_filter=each_date_range
)
logger.info("Writing each block into excel sheet")
shape, worksheet, header_flag = \
self.write_dataframe_to_excel(
input_json=each_blocks,
writer=writer,
workbook=workbook,
sheet_name=sheet_name,
start_col=start_col,
start_row=start_row,
header_merge_format=SterliteRefineryTemplate.COLUMN_HEADER_FORMAT,
column_merge_format=SterliteRefineryTemplate.COLUMN_HEADER_FORMAT,
blank_merge_format=SterliteRefineryTemplate.BLANK_COLUMN_HEADER_FORMAT
)
date_flag, date_filter = self.create_custom_date_filter(
input_json=input_json)
if total_column < shape[1]:
total_column = shape[1]
start_row += shape[0] + 2
if date_flag:
with pd.ExcelWriter(
input_json["file_name"],
engine="xlsxwriter") as writer:
if header_flag:
start_row += 1
for each_date_range in date_filter:
logger.info(f"date filter: {each_date_range}")
# Add a header format.
main_header_format = workbook.add_format(
SterliteRefineryTemplate.WORKSHEET_HEADER_FORMAT)
logger.info("Creating Header for each sheet")
report_template = copy.deepcopy(
SterliteRefineryTemplate.REPORT_TEMPLATE
)
start_col = CommonConstants.START_COLUMN
start_row = CommonConstants.START_ROW
total_column = 0
workbook = writer.book
sheet_name = datetime.strptime(
each_date_range[CommonConstants.DAY_START_DATE],
"%Y-%m-%d").strftime("%d %b %Y")
worksheet = None
# Iterating over sterlite json file
for each_blocks in report_template:
logger.info("Fetching each KPI data from queries")
each_blocks = self.get_queries_from_db(
input_json=each_blocks, date_filter=each_date_range
)
# (Merge Rows, Start Column, '', Total Column, Title, Format)
worksheet.merge_range(1, 0, 0, 0, sheet_name,
main_header_format)
worksheet.merge_range(1, 1, 0, total_column - 1,
ReportType.REFINERY_WORKSHEET_NAME,
main_header_format)
logger.info("Writing each block into excel sheet")
shape, worksheet, header_flag = \
self.write_dataframe_to_excel(
input_json=each_blocks,
writer=writer,
workbook=workbook,
sheet_name=sheet_name,
start_col=start_col,
start_row=start_row,
header_merge_format=SterliteRefineryTemplate.COLUMN_HEADER_FORMAT,
column_merge_format=SterliteRefineryTemplate.COLUMN_HEADER_FORMAT,
blank_merge_format=SterliteRefineryTemplate.BLANK_COLUMN_HEADER_FORMAT
)
if total_column < shape[1]:
total_column = shape[1]
start_row += shape[0] + 2
if header_flag:
start_row += 1
logger.info("Creating Header for each sheet")
self.merge_worksheet(
start_row=1, start_col=0, end_row=0,
end_col=0, title=sheet_name,
style=SterliteRefineryTemplate.WORKSHEET_HEADER_FORMAT,
worksheet=worksheet, workbook=workbook,
)
self.merge_worksheet(
start_row=1, start_col=1, end_row=0,
end_col=total_column - 1,
title=ReportType.REFINERY_WORKSHEET_NAME,
style=SterliteRefineryTemplate.WORKSHEET_HEADER_FORMAT,
worksheet=worksheet, workbook=workbook,
)
# Setting width to the column
worksheet.set_column(0, total_column,
CommonConstants.OVERALL_COLUMN_WIDTH)
# Setting width to the column
worksheet.set_column(0, total_column,
CommonConstants.REFINERY_COLUMN_WIDTH)
# Center alignment of Excel data
workbook.formats[0].set_align('center')
logger.info(f"Report completed for: {each_date_range}")
# Center alignment of Excel data
workbook.formats[0].set_align('center')
logger.info(f"Report completed for: {each_date_range}")
else:
logger.error("Missing: Start Date or End Date")
if str(input_json["job_type"]).lower() == ReportType.ACP_REPORT:
logger.info("Generating custom date filter with in the range")
# Getting custom date range using start date and end date
date_filter = self.create_custom_date_filter(input_json=input_json)
with pd.ExcelWriter(
input_json["file_name"],
engine="xlsxwriter") as writer:
for each_date_range in date_filter:
logger.info(f"date filter: {each_date_range}")
report_template = copy.deepcopy(
ACPReportTemplate.REPORT_TEMPLATE
)
start_col = CommonConstants.START_COLUMN
start_row = CommonConstants.START_ROW
total_column = 0
workbook = writer.book
sheet_name = datetime.strptime(
each_date_range[CommonConstants.DAY_START_DATE],
"%Y-%m-%d").strftime("%d %b %Y")
worksheet = None
# Iterating over sterlite json file
for each_blocks in report_template:
logger.info("Fetching each KPI data from queries")
each_blocks = self.get_queries_from_db(
input_json=each_blocks, date_filter=each_date_range
)
date_flag, date_filter = self.create_custom_date_filter(
input_json=input_json
)
logger.info("Writing each block into excel sheet")
shape, worksheet, header_flag = \
self.write_dataframe_to_excel(
input_json=each_blocks,
writer=writer,
workbook=workbook,
sheet_name=sheet_name,
start_col=start_col,
start_row=start_row,
header_merge_format=ACPReportTemplate.COLUMN_HEADER_FORMAT,
column_merge_format=ACPReportTemplate.COLUMN_HEADER_FORMAT,
blank_merge_format=ACPReportTemplate.BLANK_COLUMN_HEADER_FORMAT
)
if date_flag:
with pd.ExcelWriter(
input_json["file_name"],
engine="xlsxwriter") as writer:
if total_column < shape[1]:
total_column = shape[1]
start_row += shape[0] + 2
for each_date_range in date_filter:
logger.info(f"date filter: {each_date_range}")
if header_flag:
start_row += 1
report_template = copy.deepcopy(
ACPReportTemplate.REPORT_TEMPLATE
)
start_col = CommonConstants.START_COLUMN
start_row = CommonConstants.START_ROW
total_column = 0
workbook = writer.book
sheet_name = datetime.strptime(
each_date_range[CommonConstants.DAY_START_DATE],
"%Y-%m-%d").strftime("%d %b %Y")
worksheet = None
# Iterating over sterlite json file
for each_blocks in report_template:
logger.info("Fetching each KPI data from queries")
each_blocks = self.get_queries_from_db(
input_json=each_blocks, date_filter=each_date_range
)
# Add a header format.
main_header_format = workbook.add_format(
ACPReportTemplate.WORKSHEET_HEADER_FORMAT)
logger.info("Writing each block into excel sheet")
shape, worksheet, header_flag = \
self.write_dataframe_to_excel(
input_json=each_blocks, writer=writer,
workbook=workbook, sheet_name=sheet_name,
start_col=start_col, start_row=start_row,
header_merge_format=ACPReportTemplate.COLUMN_HEADER_FORMAT,
column_merge_format=ACPReportTemplate.COLUMN_HEADER_FORMAT,
blank_merge_format=ACPReportTemplate.BLANK_COLUMN_HEADER_FORMAT
)
if total_column < shape[1]:
total_column = shape[1]
start_row += shape[0] + 2
if header_flag:
start_row += 1
logger.info("Creating Header for each sheet")
# -------- Image Section --------
logger.info("Adding Image in header")
self.merge_worksheet(
start_row=0, start_col=0, end_row=2, end_col=2,
title="",
style=ACPReportTemplate.WORKSHEET_IMAGE_HEADER_FORMAT,
worksheet=worksheet, workbook=workbook,
)
worksheet.insert_image('A1', 'logo.png')
# -------- Heading Section --------
logger.info("Adding Heading in Header")
self.merge_worksheet(
start_row=0, start_col=3, end_row=1,
end_col=total_column - 1,
title=ReportType.ACP_WORKSHEET_NAME_1,
style=ACPReportTemplate.WORKSHEET_HEADER_FORMAT,
worksheet=worksheet, workbook=workbook,
)
self.merge_worksheet(
start_row=2, start_col=3,
end_row=2, end_col=total_column - 1,
title=ReportType.ACP_WORKSHEET_NAME_2,
style=ACPReportTemplate.WORKSHEET_HEADER_FORMAT,
worksheet=worksheet, workbook=workbook,
)
logger.info("Creating Header for each sheet")
# -------- Date HEADER Section --------
logger.info("Adding Date time in header")
self.merge_worksheet(
start_row=3, start_col=0, end_row=3, end_col=1,
title=ReportType.DATE,
style=ACPReportTemplate.WORKSHEET_DATE_HEADER_FORMAT,
worksheet=worksheet, workbook=workbook,
)
# (Merge Rows, Start Column, '', Total Column, Title, Format)
worksheet.merge_range(1, 0, 0, 0, sheet_name,
main_header_format)
worksheet.merge_range(1, 1, 0, total_column - 1,
ReportType.ACP_WORKSHEET_NAME,
main_header_format)
# -------- Date Section --------
logger.info("Adding Date time in header")
self.merge_worksheet(
start_row=3, start_col=2,
end_row=3, end_col=3, title=sheet_name,
style=ACPReportTemplate.WORKSHEET_DATE_FORMAT,
worksheet=worksheet, workbook=workbook,
)
# Setting width to the column
worksheet.set_column(0, total_column,
CommonConstants.OVERALL_COLUMN_WIDTH)
# Setting width to the column
worksheet.set_column(0, total_column,
CommonConstants.ACP_COLUMN_WIDTH)
# Center alignment of Excel data
workbook.formats[0].set_align('center')
logger.info(f"Report completed for: {each_date_range}")
# Center alignment of Excel data
workbook.formats[0].set_align('center')
logger.info(f"Report completed for: {each_date_range}")
else:
logger.error("Missing: Start Date or End Date")
except GeneralException as err:
logger.error(f"Exception in custom_report_handler: {err}")
return status, message, data
return response_json
def merge_worksheet(self, start_row, start_col,
                    end_row, end_col, title, style, worksheet, workbook):
    """
    Merge a rectangular cell range on the worksheet and write *title* into it.

    :param start_row: first row of the merged range (0-indexed)
    :param start_col: first column of the merged range (0-indexed)
    :param end_row: last row of the merged range (inclusive)
    :param end_col: last column of the merged range (inclusive)
    :param title: text written into the merged cell
    :param style: dict of format properties registered via workbook.add_format
    :param worksheet: xlsxwriter worksheet the merge is applied to
    :param workbook: xlsxwriter workbook used to create the cell format
    :return: None
    """
    # NOTE(review): several call sites pass start_row > end_row
    # (e.g. 1, 0, 0, 0) — xlsxwriter's merge_range expects
    # first_row <= last_row; confirm the reversed order is intentional.
    style_format = workbook.add_format(style)
    worksheet.merge_range(
        start_row, start_col, end_row, end_col, title, style_format
    )
......@@ -14,6 +14,8 @@ class ReportInput(BaseModel):
class ReportOutput(BaseModel):
    # Response model for the custom-report endpoint.
    # NOTE(review): this field set looks like two response shapes merged
    # together — the old (status, message, data) triple and the new
    # job-oriented fields (job_id, file_path, ...). The handler now builds
    # a dict with only the job-oriented keys; confirm whether
    # status/message/data should be removed from this model.
    status: bool
    message: str
    data: str
    job_id: str
    file_path: str = ""
    file_name: Optional[str] = None
    is_schedule: Optional[bool] = False
    project_id: str
......@@ -63,10 +63,10 @@ async def custom_report_function(input_json: custom_report_input_model):
"""
try:
status, message, data = report_handler_obj.custom_report_handler(
response = report_handler_obj.custom_report_handler(
input_json=input_json
)
return {"status": status, "message": message, "data": data}
return response
except Exception as err:
logger.exception(
CommonConstants.EXCEPTION_RAISER.format(str(err)),
......
from scripts.constants import PostgresConstant
from scripts.core.db.postgres.custom_report_query import ACPQuery
class ACPReportTemplate:
""" HEADER STYLING FORMAT """
WORKSHEET_IMAGE_HEADER_FORMAT = {
"bold": True,
"align": "center",
"valign": "center",
"border": 1,
}
WORKSHEET_HEADER_FORMAT = {
"bold": True,
"align": "center",
"valign": "center",
"fg_color": "#e6e7eb",
"font_color": "#1b314f",
"border": 1,
'font_size': '20',
"fg_color": "#1AC5F3",
'font_size': '18',
}
""" DATE HEADER """
WORKSHEET_DATE_HEADER_FORMAT = {
"bold": True,
"align": "center",
"valign": "center",
"border": 1,
"fg_color": "#F4AD66",
'font_size': '12',
}
WORKSHEET_DATE_FORMAT = {
"bold": True,
"align": "center",
"valign": "center",
"border": 1,
"fg_color": "#F4CE84",
'font_size': '12',
}
""" COLUMN HEADER FORMAT """
COLUMN_HEADER_FORMAT = {
"bold": True,
"align": "center",
......@@ -25,6 +50,8 @@ class ACPReportTemplate:
"align": "center",
"valign": "center",
}
""" REPORT TEMPLATE """
REPORT_TEMPLATE = [
{
"BLANK": {
......@@ -47,17 +74,11 @@ class ACPReportTemplate:
"FO": [
ACPQuery.FO.QUERY_1,
ACPQuery.FO.QUERY_2,
],
"PNG/LPG": [
],
"Rejection": [
ACPQuery.Rejection.QUERY_1,
ACPQuery.Rejection.QUERY_2,
],
"Power": [
]
},
"data": [],
"data_column": [
......@@ -105,161 +126,215 @@ class ACPReportTemplate:
],
},
},
{
"A-SHIFT COMMUNICATION": {
"columns": [
"S.No", "A-SHIFT COMMUNICATION"
],
"query": {
"AShiftCommunication": [
ACPQuery.AShiftCommunication.QUERY_1,
],
},
"data": [],
"data_column": [
"s_no", "a_shift_comm"
],
},
"A SHIFTWISE DATA": {
"columns": [
"MOULD", "SET VALUE", "FIELD WT"
],
"query": {
"AShiftwiseData": [
ACPQuery.AShiftwiseData.QUERY_1,
]
},
"data": [],
"data_column": [
"mould", "set_value", "field_wt"
],
"addition": {
"merge_header": "SHIFTWISE DATA"
}
}
},
{
"B-SHIFT COMMUNICATION": {
"columns": [
"S.No", "B-SHIFT COMMUNICATION"
],
"query": {
"BShiftCommunication": [
ACPQuery.BShiftCommunication.QUERY_1,
],
},
"data": [],
"data_column": [
"s_no", "b_shift_comm"
],
},
"B SHIFTWISE DATA": {
"columns": [
"MOULD", "SET VALUE", "FIELD WT"
],
"query": {
"BShiftwiseData": [
ACPQuery.BShiftwiseData.QUERY_1,
]
},
"data": [],
"data_column": [
"mould", "set_value", "field_wt"
],
"addition": {
"merge_header": "SHIFTWISE DATA"
}
}
},
{
"C-SHIFT COMMUNICATION": {
"columns": [
"S.No", "C-SHIFT COMMUNICATION"
],
"query": {
"CShiftCommunication": [
ACPQuery.CShiftCommunication.QUERY_1,
],
},
"data": [],
"data_column": [
"s_no", "a_shift_comm"
],
},
"C SHIFTWISE DATA": {
"columns": [
"MOULD", "SET VALUE", "FIELD WT"
],
"query": {
"CShiftwiseData": [
ACPQuery.CShiftwiseData.QUERY_1,
]
},
"data": [],
"data_column": [
"mould", "set_value", "field_wt"
],
"addition": {
"merge_header": "SHIFTWISE DATA"
}
}
},
{
"PARAMETERS": {
"columns": [
"PARAMETERS", "RANGE", "VALUE"
],
"query": {
"Furnace1FoTemperature": [
ACPQuery.Furnace1FoTemperature.QUERY_1,
],
"FoOilPressure": [
ACPQuery.FoOilPressure.QUERY_1,
],
"Furnace1OilFlow": [
ACPQuery.Furnace1OilFlow.QUERY_1,
],
"Furnace2OilFlow": [
ACPQuery.Furnace2OilFlow.QUERY_1,
],
"MeltTemperature": [
# {
# "A-SHIFT COMMUNICATION": {
# "columns": [
# "S.No", "A-SHIFT COMMUNICATION"
# ],
# "query": {
# "AShiftCommunication": [
# ACPQuery.AShiftCommunication.QUERY_1,
# ],
# },
# "data": [],
# "data_column": [
# "s_no", "a_shift_comm"
# ],
# },
# "A SHIFTWISE DATA": {
# "columns": [
# "MOULD", "SET VALUE", "FIELD WT"
# ],
# "query": {
# "AShiftwiseData": [
# ACPQuery.AShiftwiseData.QUERY_1,
# ]
# },
# "data": [],
# "data_column": [
# "mould", "set_value", "field_wt"
# ],
# "addition": {
# "merge_header": "SHIFTWISE DATA"
# }
# }
# },
# {
# "B-SHIFT COMMUNICATION": {
# "columns": [
# "S.No", "B-SHIFT COMMUNICATION"
# ],
# "query": {
# "BShiftCommunication": [
# ACPQuery.BShiftCommunication.QUERY_1,
# ],
# },
# "data": [],
# "data_column": [
# "s_no", "b_shift_comm"
# ],
# },
# "B SHIFTWISE DATA": {
# "columns": [
# "MOULD", "SET VALUE", "FIELD WT"
# ],
# "query": {
# "BShiftwiseData": [
# ACPQuery.BShiftwiseData.QUERY_1,
# ]
# },
# "data": [],
# "data_column": [
# "mould", "set_value", "field_wt"
# ],
# "addition": {
# "merge_header": "SHIFTWISE DATA"
# }
# }
# },
# {
# "C-SHIFT COMMUNICATION": {
# "columns": [
# "S.No", "C-SHIFT COMMUNICATION"
# ],
# "query": {
# "CShiftCommunication": [
# ACPQuery.CShiftCommunication.QUERY_1,
# ],
# },
# "data": [],
# "data_column": [
# "s_no", "a_shift_comm"
# ],
# },
# "C SHIFTWISE DATA": {
# "columns": [
# "MOULD", "SET VALUE", "FIELD WT"
# ],
# "query": {
# "CShiftwiseData": [
# ACPQuery.CShiftwiseData.QUERY_1,
# ]
# },
# "data": [],
# "data_column": [
# "mould", "set_value", "field_wt"
# ],
# "addition": {
# "merge_header": "SHIFTWISE DATA"
# }
# }
# },
# {
# "PARAMETERS": {
# "columns": [
# "PARAMETERS", "RANGE", "VALUE"
# ],
# "query": {
# "Furnace1FoTemperature": [
# ACPQuery.Furnace1FoTemperature.QUERY_1,
# ],
# "FoOilPressure": [
# ACPQuery.FoOilPressure.QUERY_1,
# ],
# "Furnace1OilFlow": [
# ACPQuery.Furnace1OilFlow.QUERY_1,
# ],
# "Furnace2OilFlow": [
# ACPQuery.Furnace2OilFlow.QUERY_1,
# ],
# "MeltTemperature": [
# ACPQuery.MeltTemperature.QUERY_1,
# ],
# "MouldTemperature": [
# ACPQuery.MouldTemperature.QUERY_1,
# ],
# "LpgVapourizerTemperature": [
# ACPQuery.LpgVapourizerTemperature.QUERY_1,
# ],
# "LaunderBlowerAirPressure": [
# ACPQuery.LaunderBlowerAirPressure.QUERY_1,
# ],
# "Furnace1ShellTemperature": [
# ACPQuery.Furnace1ShellTemperature.QUERY_1,
# ],
# "Furnace2ShellTemperature": [
# ACPQuery.Furnace2ShellTemperature.QUERY_1,
# ],
# "BariumSulphateSolutionSpecificGravity": [
# ACPQuery.BariumSulphateSolutionSpecificGravity.QUERY_1,
# ],
# "MouldAlignmentLevel": [
# ACPQuery.MouldAlignmentLevel.QUERY_1,
# ],
# "HeatNoArsenicA": [
# ACPQuery.HeatNoArsenicA.QUERY_1,
# ],
# "HeatNoArsenicB": [
# ACPQuery.HeatNoArsenicB.QUERY_1,
# ],
# "HeatNoArsenicC": [
# ACPQuery.HeatNoArsenicC.QUERY_1,
# ]
# },
# "data": [],
# "data_column": [
# "parameters", "range", "value"
# ],
# },
# },
# {
# "FURNACE SHIFT A": {
# "columns": [
# "FURNACE", "BURNER 1", "BURNER 2", "BURNER 3"
# ],
# "query": {
# "FurnaceShiftA": [
# ACPQuery.FurnaceShiftA.QUERY_1,
# ],
# },
# "data": [],
# "data_column": [
# "furnace", "burner_1", "burner_2", "burner_3"
# ],
# "addition": {
# "merge_header": "SHIFT - A DATA"
# }
# },
# "FURNACE SHIFT B": {
# "columns": [
# "FURNACE", "BURNER 1", "BURNER 2", "BURNER 3"
# ],
# "query": {
# "FurnaceShiftB": [
# ACPQuery.FurnaceShiftB.QUERY_1,
# ],
# },
# "data": [],
# "data_column": [
# "furnace", "burner_1", "burner_2", "burner_3"
# ],
# "addition": {
# "merge_header": "SHIFT - B DATA"
# }
# },
# "FURNACE SHIFT C": {
# "columns": [
# "FURNACE", "BURNER 1", "BURNER 2", "BURNER 3"
# ],
# "query": {
# "FurnaceShiftC": [
# ACPQuery.FurnaceShiftC.QUERY_1,
# ],
# },
# "data": [],
# "data_column": [
# "furnace", "burner_1", "burner_2", "burner_3"
# ],
# "addition": {
# "merge_header": "SHIFT - C DATA"
# }
# }
# }
],
"MouldTemperature": [
ACPQuery.MouldTemperature.QUERY_1,
],
"LpgVapourizerTemperature": [
ACPQuery.LpgVapourizerTemperature.QUERY_1,
],
"LaunderBlowerAirPressure": [
ACPQuery.LaunderBlowerAirPressure.QUERY_1,
],
"Furnace1ShellTemperature": [
ACPQuery.Furnace1ShellTemperature.QUERY_1,
],
"Furnace2ShellTemperature": [
ACPQuery.Furnace2ShellTemperature.QUERY_1,
],
"BariumSulphateSolutionSpecificGravity": [
ACPQuery.BariumSulphateSolutionSpecificGravity.QUERY_1,
],
"MouldAlignmentLevel": [
ACPQuery.MouldAlignmentLevel.QUERY_1,
],
"HeatNoArsenicA": [
ACPQuery.HeatNoArsenicA.QUERY_1,
],
"HeatNoArsenicB": [
ACPQuery.HeatNoArsenicB.QUERY_1,
],
"HeatNoArsenicC": [
ACPQuery.HeatNoArsenicC.QUERY_1,
]
},
"data": [],
"data_column": [
"parameters", "range", "value"
],
},
}
]
......@@ -3,6 +3,7 @@ from scripts.core.db.postgres.custom_report_query import SterliteRefineryQuery
class SterliteRefineryTemplate:
""" HEADER STYLING FORMAT """
WORKSHEET_HEADER_FORMAT = {
"bold": True,
"align": "center",
......@@ -12,6 +13,8 @@ class SterliteRefineryTemplate:
"border": 1,
'font_size': '20',
}
""" COLUMN HEADER FORMAT """
COLUMN_HEADER_FORMAT = {
"bold": True,
"align": "center",
......@@ -25,6 +28,8 @@ class SterliteRefineryTemplate:
"align": "center",
"valign": "center",
}
""" REPORT TEMPLATE """
REPORT_TEMPLATE = [
{
"CATHODE PRODUCTION REVISED": {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment