Commit 72f96be0 authored by suryakant

Sterlite Custom Report Updates

parent b60bfd58
@@ -23,7 +23,7 @@ class APIConstants:
    SHUTDOWN = "shutdown"
    HEALTH_CHECK = "/healthcheck"
-   INIT_DB_ENDPOINT = "/"
+   CUSTOM_REPORT_ENDPOINT = "/custom_report"


class CommonConstants:
@@ -32,15 +32,24 @@ class CommonConstants:
    """
    GET = "GET"
    POST = "POST"
-   EVENT_HANDLING_ENDPOINT = "Event Handler Endpoints"
+   CUSTOM_REPORT_TAG = "Event Handler Endpoints"
    EXCEPTION_RAISER = "Exception ->{}"
    DEV_KEY = "dev"
+   DATE_TIME_FORMAT = "%Y-%m-%d"
+   QUERY = "query"
+   DAY_START_DATE = "day_start_date"
+   DAY_END_DATE = "day_end_date"
+   MONTH_START_DATE = "month_start_date"
+   MONTH_END_DATE = "month_end_date"
+   YEAR_START_DATE = "year_start_date"
+   YEAR_END_DATE = "year_end_date"


-class PostgresConstants:
+class ReportType:
    """
-   Constants related to PostgreSQL database
+   Constants related to ReportType
    """
+   REFINERY_REPORT = "refinery_report"


figlet = """
...
-from scripts.constants import PostgresConstants, CommonConstants
-from scripts.configurations import postgres_details
+import pandas as pd
+from datetime import datetime
+from scripts.constants import ReportType, CommonConstants
+from scripts.template.sterlite_report_template import SterliteRefineryTemplate
from scripts.core.logging.application_logging import logger
+from scripts.core.exception.app_exceptions import GeneralException
from scripts.core.utilities.postgresql_db_utils import PostgresDBUtility


-class CustomReport:
+class CustomReportHandler:
    def __init__(self):
        self.postgres_db_obj = PostgresDBUtility()
    def create_custom_date_filter(self, input_json):
        """
        This method converts the start date and end date into a list of
        per-day date-range filters.
        :param input_json:
        :return:
        """
        date_range_list = []
        # Start date
        start_date = datetime.strptime(
            input_json["property"]["start_date"], CommonConstants.DATE_TIME_FORMAT
        )
        # End date
        end_date = datetime.strptime(
            input_json["property"]["end_date"], CommonConstants.DATE_TIME_FORMAT
        )
        logger.info(f"Creating list of dates starting from {start_date} to {end_date}")
        date_list = pd.date_range(start_date, end_date, freq='D')
        # Iterating and creating where clause filters
        # Output - {'day_start_date': 'YYYY-MM-DD', 'day_end_date': 'YYYY-MM-DD',
        # 'month_start_date': 'YYYY-MM-DD', 'month_end_date': 'YYYY-MM-DD',
        # 'year_start_date': 'YYYY-MM-DD', 'year_end_date': 'YYYY-MM-DD'}
        for each_dates in date_list.strftime(
                CommonConstants.DATE_TIME_FORMAT).to_list():
            # To get the financial year
            financial_year = None
            date_obj = datetime.strptime(each_dates, CommonConstants.DATE_TIME_FORMAT)
            if date_obj.month >= 4:
                financial_year = str(date_obj.year)
            elif date_obj.month < 4:
                financial_year = str(date_obj.year - 1)
            date_range_list.append(
                dict(
                    day_start_date=each_dates,
                    day_end_date=each_dates,
                    month_start_date=each_dates[:-2] + "01",
                    month_end_date=each_dates,
                    year_start_date=financial_year + "-04-01",
                    year_end_date=each_dates,
                )
            )
        return date_range_list
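The method expects input_json["property"]["start_date"] and ["end_date"] in YYYY-MM-DD form and emits one filter dict per day, with the financial year anchored to 1 April. A minimal standalone sketch of that output (dates and the helper name are illustrative, not part of this commit):

# Illustrative sketch, not part of this commit: reproduces the date-filter
# shape for a two-day range that crosses the 1 April financial-year boundary.
from datetime import datetime
import pandas as pd

DATE_FMT = "%Y-%m-%d"

def build_date_filters(start_date, end_date):
    filters = []
    for day in pd.date_range(start_date, end_date, freq="D").strftime(DATE_FMT):
        date_obj = datetime.strptime(day, DATE_FMT)
        # Months before April belong to the financial year that began in the
        # previous calendar year.
        fy = date_obj.year if date_obj.month >= 4 else date_obj.year - 1
        filters.append({
            "day_start_date": day,
            "day_end_date": day,
            "month_start_date": day[:-2] + "01",
            "month_end_date": day,
            "year_start_date": f"{fy}-04-01",
            "year_end_date": day,
        })
    return filters

print(build_date_filters("2023-03-31", "2023-04-01"))
# [{'day_start_date': '2023-03-31', ..., 'year_start_date': '2022-04-01', ...},
#  {'day_start_date': '2023-04-01', ..., 'year_start_date': '2023-04-01', ...}]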
    def get_queries_from_db(self, input_json, date_filter):
        """
        This method formats each KPI query of a report block with the given
        date filter, executes it, and appends the fetched (or null) values to
        the block's data list.
        :param input_json:
        :param date_filter:
        :return:
        """
        for each_blocks in input_json:
            # Iterating each block to fetch its queries
            print(each_blocks)
            if input_json[each_blocks][CommonConstants.QUERY]:
                for each_kpi in input_json[each_blocks][CommonConstants.QUERY]:
                    temp_data_dict = dict()
                    # Iterating each query for each KPI
                    for each_query in \
                            input_json[each_blocks][CommonConstants.QUERY][each_kpi]:
                        query = each_query.format(
                            day_start_date=date_filter[CommonConstants.DAY_START_DATE],
                            day_end_date=date_filter[CommonConstants.DAY_END_DATE],
                            month_start_date=date_filter[CommonConstants.MONTH_START_DATE],
                            month_end_date=date_filter[CommonConstants.MONTH_END_DATE],
                            year_start_date=date_filter[CommonConstants.YEAR_START_DATE],
                            year_end_date=date_filter[CommonConstants.YEAR_END_DATE]
                        )
                        response = self.postgres_db_obj.fetch_data(query=query)
                        if response:
                            temp_data_dict.update(dict(response[0]))
                    if not temp_data_dict:
                        # Creating null values if no data
                        for each_columns in input_json[each_blocks]["data_column"]:
                            temp_data_dict.update({each_columns: None})
                    input_json[each_blocks]["data"].append(temp_data_dict)
            else:
                temp_data_dict = dict()
                for each_columns in input_json[each_blocks]["data_column"]:
                    temp_data_dict.update(
                        {each_columns: None}
                    )
                input_json[each_blocks]["data"].append(temp_data_dict)
        return input_json
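Each KPI query is a plain str.format template whose placeholders match the CommonConstants date keys. A hypothetical example of how one template is resolved against a single date filter (the SQL text and table name are invented for illustration; only the placeholder names come from this commit):

# Illustrative sketch, not part of this commit: the SQL text is hypothetical.
query_template = (
    "SELECT avg(value) AS mtd FROM refinery_metrics "
    "WHERE ts BETWEEN '{month_start_date}' AND '{month_end_date}'"
)
date_filter = {
    "day_start_date": "2023-04-15", "day_end_date": "2023-04-15",
    "month_start_date": "2023-04-01", "month_end_date": "2023-04-15",
    "year_start_date": "2023-04-01", "year_end_date": "2023-04-15",
}
# str.format ignores the keys a given template does not use.
print(query_template.format(**date_filter))
# SELECT avg(value) AS mtd FROM refinery_metrics WHERE ts BETWEEN '2023-04-01' AND '2023-04-15'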
    def custom_report_handler(self, input_json):
        """
        This method builds the date filters for the requested report and runs
        the report-template queries for each date range.
        :param input_json:
        :return:
        """
        status = False
        message = "Error generating a message"
        data = "Data"
        try:
            # if str(input_json.job_type).lower() == ReportType.REFINERY_REPORT:
            if str(input_json["job_type"]).lower() == ReportType.REFINERY_REPORT:
                date_filter = self.create_custom_date_filter(input_json=input_json)
                for each_date_range in date_filter:
                    # Iterating over sterlite json file
                    for each_blocks in SterliteRefineryTemplate.REPORT_TEMPLATE:
                        # Getting the data from queries
                        each_blocks = self.get_queries_from_db(
                            input_json=each_blocks, date_filter=each_date_range
                        )
                        # print(each_blocks)
                        print("=========================================")
                    break
        except GeneralException as err:
            logger.error(f"Exception in custom_report_handler: {err}")
        return status, message, data
@@ -4,8 +4,13 @@ from pydantic import BaseModel


class ReportInput(BaseModel):
-    from_date: Optional[str]
-    end_date: Optional[str]
+    job_id: Optional[str]
+    user_id: Optional[str]
+    report: Optional[dict]
+    property: Optional[dict]
+    job_type: Optional[str]
+    tz: Optional[str]
+    file_name: Optional[str]


class ReportOutput(BaseModel):
...
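With these fields, a refinery-report request body would look roughly like the dict below. The values are illustrative; of them, only job_type and property.start_date / property.end_date are actually read by the handler above.

# Illustrative request body, not part of this commit; field names follow
# ReportInput, values are made up.
sample_input = {
    "job_id": "job-001",
    "user_id": "user-42",
    "report": {},
    "property": {"start_date": "2023-04-01", "end_date": "2023-04-07"},
    "job_type": "refinery_report",
    "tz": "Asia/Kolkata",
    "file_name": "sterlite_refinery_report.xlsx",
}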
@@ -12,7 +12,7 @@ Usage:
"""
from fastapi import APIRouter
from scripts.configurations import service_details
-from scripts.core.handler.event_handler import CustomReport
+from scripts.core.handler.event_handler import CustomReportHandler
from scripts.core.logging.application_logging import logger
from scripts.constants import APIConstants, CommonConstants, figlet
from scripts.core.schemas.api import (
@@ -20,8 +20,8 @@ from scripts.core.schemas.api import (
    custom_report_output_model
)

-event_handler_obj = CustomReport()
-event_router = APIRouter(tags=[CommonConstants.EVENT_HANDLING_ENDPOINT])
+report_handler_obj = CustomReportHandler()
+event_router = APIRouter(tags=[CommonConstants.CUSTOM_REPORT_TAG])


@event_router.on_event(APIConstants.STARTUP)
@@ -50,23 +50,25 @@ async def ping():
@event_router.post(
-    APIConstants.INIT_DB_ENDPOINT, response_model=custom_report_output_model)
-async def initialize_db(input_json: custom_report_input_model):
+    APIConstants.CUSTOM_REPORT_ENDPOINT, response_model=custom_report_output_model)
+async def custom_report_function(input_json: custom_report_input_model):
    """
    Initiate postgres db and create tables
    Args:
-        content (InitDbInput): Request body containing the necessary parameters.
+        content (ReportInput): Request body containing the necessary parameters.
    Returns:
-        :param :
+        :param :input_json
    """
    try:
-        return {"status": True, "message": ""}
+        status, message, data = report_handler_obj.custom_report_handler(
+            input_json=input_json
+        )
+        return {"status": status, "message": message, "data": data}
    except Exception as err:
        logger.exception(
            CommonConstants.EXCEPTION_RAISER.format(str(err)),
            exc_info=service_details.exception_trace,
        )
+        return {"status": False, "message": str(err)}
@@ -34,7 +34,8 @@ class PostgresDBUtility:
        This method is used for selecting records from tables.
        :param query: The select query to be executed
        :param db: Session
-        :return: status: The status True on success and False on failure and the list of rows
+        :return: status: The status True on success and False on failure and
+            the list of rows
        """
        logger.debug(f" SQL QUERY {query}")
        connection = None
@@ -60,7 +61,8 @@ class PostgresDBUtility:
        """
        This method is used for selecting records from tables.
        :param query: The select query to be executed
-        :return: status: The status True on success and False on failure and the list of rows
+        :return: status: The status True on success and False on failure and
+            the list of rows
        """
        connection = None
        result = ""
@@ -244,7 +246,7 @@ class PostgresDBUtility:
        result = []
        try:
            connection = self.create_connection()
-            cursor = connection.cursor()
+            cursor = connection.cursor(cursor_factory=self.cursor_type)
            cursor.execute(query)
            result = cursor.fetchall()
        except Exception as e:
...
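The cursor_factory change is what lets the handler call dict(response[0]) on fetched rows. Assuming self.cursor_type is psycopg2's RealDictCursor (its definition is outside this diff), rows come back keyed by column name:

# Illustrative sketch, not part of this commit; assumes cursor_type is
# psycopg2.extras.RealDictCursor, which is not shown in this diff.
import psycopg2
import psycopg2.extras

connection = psycopg2.connect(host="localhost", dbname="reports",
                              user="postgres", password="postgres")
cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
cursor.execute("SELECT 98.5 AS anode_availability, 'percent' AS uom")
row = cursor.fetchall()[0]
print(dict(row))  # {'anode_availability': Decimal('98.5'), 'uom': 'percent'}
cursor.close()
connection.close()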
from scripts.core.db.postgres.custom_report_query import SterliteRefineryQuery


class SterliteRefineryTemplate:
    REPORT_TEMPLATE = [
        {
            "ANODE AVAILABILITY": {
                "columns": [
                    "ANODE AVAILABILITY",
                    "UOM",
                    "NORMS (Month)",
                    "ON DATE(Day)",
                    "MTD",
                    "YTD"
                ],
                "query": {
                    "ANODE_AVAILABILITY": [
                        SterliteRefineryQuery.AnodeAvailability.QUERY_AA,
                        SterliteRefineryQuery.AnodeAvailability.QUERY_MTD,
                        SterliteRefineryQuery.AnodeAvailability.QUERY_YTD
                    ],
                    "DO CELLS IN OPERATION": [
                        SterliteRefineryQuery.DOCellsInOperation.QUERY_AA
                    ],
                    "Total Cells In Operation": [
                        SterliteRefineryQuery.TotalCellsInOperation.QUERY_1,
                        SterliteRefineryQuery.TotalCellsInOperation.QUERY_2,
                        SterliteRefineryQuery.TotalCellsInOperation.QUERY_3
                    ]
                },
                "data": [],
                "data_column": ["anode_availability", "uom", "norms", "on_date", "mtd", "ytd"],
                "description": "",
                "format": ""
            },
            "SAFETY REPORT": {
                "columns": [
                    "SAFETY REPORT",
                    "UOM",
                    "NORMS",
                    "ON DATE",
                    "MTD",
                    "YTD"
                ],
                "query": [],
                "data": [],
                "data_column": ["safety_report", "uom", "norms", "on_date", "mtd", "ytd"],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "POWER AVAILABILITY": {
                "columns": [
                    "POWER AVAILABILITY",
                    "UOM",
                    "NORMS",
                    "ON DATE",
                    "MTD",
                    "YTD"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            },
            "RM ANODE RECEIPT DETAILS": {
                "columns": [
                    "RM ANODE RECEIPT DETAILS",
                    "ON DATE",
                    "MTD",
                    "YTD",
                    "Material",
                    "OPENING STOCK"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "EFFICIENCIES": {
                "columns": [
                    "EFFICIENCIES",
                    "UOM",
                    "NORMS",
                    "ON DATE",
                    "MTD",
                    "YTD"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            },
            "BANK / CROP": {
                "columns": [
                    "BANK / CROP",
                    "CE (%)",
                    "THEOR. WEIGHT",
                    "ACTUAL WEIGHT",
                    "STRIPPING TIME"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "PRODUCTION": {
                "columns": [
                    "PRODUCTION",
                    "UOM",
                    "NORMS",
                    "ON DATE",
                    "MTD",
                    "YTD"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            },
            "CIRCULATION": {
                "columns": [
                    "CIRCULATION",
                    "CE%",
                    "THEOR. WEIGHT",
                    "ACTUAL WEIGHT"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "FG INVENTORY": {
                "columns": [
                    "FG INVENTORY",
                    "ON DATE",
                    "MTD",
                    "YTD"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            },
            "PRODUCTION": {
                "columns": [
                    "PRODUCTION",
                    "ON DATE",
                    "MTD",
                    "YTD"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "CROP": {
                "columns": [
                    "CROP",
                    "BANKS",
                    "TOTAL CELLS",
                    "CELL VOLTAGE (V)",
                    "T. SHORTS / CELL / CHECK"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "STRIPPING TIME": {
                "columns": [
                    "STRIPPING TIME",
                    "UOM",
                    "1st CROP - Sttripping time",
                    "2nd CROP - Change over time",
                    "3rd CROP",
                    "CSM Plate Rejection"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "FILTER AVAILABILITY": {
                "columns": [
                    "FILTER AVAILABILITY",
                    "ON DATE",
                    "MTD",
                    "Filtered Volume",
                    "Total Volume Refinery",
                    "ON DATE"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "SPECIFIC ENERGY CONSUMPTION": {
                "columns": [
                    "UTILITIES CONSUMPTIONS",
                    "UOM",
                    "NORMS",
                    "ON DATE",
                    "MTD",
                    "YTD"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            },
            "SPECIFIC CONSUMPTION": {
                "columns": [
                    "CONSUMABLES",
                    "UOM",
                    "NORMS",
                    "ON DATE",
                    "MTD",
                    "YTD"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "ELECTROLYTE COMPOSITION": {
                "columns": [
                    "ELECTROLYTE COMPOSITION",
                    "UOM",
                    "NORMS",
                    "DATE",
                    "CIR-1",
                    "CIR-2"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            },
            "SUSPENDED SOLIDS": {
                "columns": [
                    "SUSPENDED SOLIDS"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "BLEEDING SECTION": {
                "columns": [
                    "BLEEDING SECTION",
                    "UOM",
                    "ON DATE",
                    "MTD",
                    "YTD"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            },
            "BANK / CROP": {
                "columns": [
                    "BANK / CROP",
                    "TIME",
                    "WEIGHT (MT)",
                    "CELLS",
                    "READING"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        },
        {
            "DEPARTMENT(M4)": {
                "columns": [
                    "DEPARTMENT(M4)",
                    "EQUIPMENT DETAILS",
                    "EQUIPMENT LOCATION",
                    "DURATION",
                    "(MT) PRODUCTION",
                    "CAUSE OF THE BREAK DOWN"
                ],
                "query": [],
                "data": [],
                "additions": [],
                "description": "",
                "format": ""
            }
        }
    ]
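The template above references SterliteRefineryQuery, whose module (scripts/core/db/postgres/custom_report_query.py) is not included in this commit. The structure it implies is nested classes of SQL template strings; a purely hypothetical sketch, with invented table and column names:

# Hypothetical sketch, not part of this commit: the real
# scripts/core/db/postgres/custom_report_query.py is not shown in the diff.
class SterliteRefineryQuery:
    class AnodeAvailability:
        QUERY_AA = (
            "SELECT avg(value) AS anode_availability FROM refinery_metrics "
            "WHERE ts BETWEEN '{day_start_date}' AND '{day_end_date}'"
        )
        QUERY_MTD = (
            "SELECT avg(value) AS mtd FROM refinery_metrics "
            "WHERE ts BETWEEN '{month_start_date}' AND '{month_end_date}'"
        )
        QUERY_YTD = (
            "SELECT avg(value) AS ytd FROM refinery_metrics "
            "WHERE ts BETWEEN '{year_start_date}' AND '{year_end_date}'"
        )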