Commit e5e4f7df authored by suryakant's avatar suryakant

Trends Dashboard

parent edfe1e7a
...@@ -13,7 +13,8 @@ class APIConstants: ...@@ -13,7 +13,8 @@ class APIConstants:
SHUTDOWN = "shutdown" SHUTDOWN = "shutdown"
HEALTH_CHECK = "/healthcheck" HEALTH_CHECK = "/healthcheck"
TRENDS_ASYNC_SERVICE = "/trends_async" DASHBOARD_METADATA_ENDPOINT = "/get_filter"
DASHBOARD_DATA_ENDPOINT = "/get_data"
class MainConstants: class MainConstants:
...@@ -35,6 +36,13 @@ class Constants: ...@@ -35,6 +36,13 @@ class Constants:
""" """
EXCEPTION_RAISER = "Exception ->{}" EXCEPTION_RAISER = "Exception ->{}"
TRENDS_HANDLING_ENDPOINT = "Trends Handler Endpoints" TRENDS_HANDLING_ENDPOINT = "Trends Handler Endpoints"
DATA = "data"
LINE = "line"
DEPARTMENT = "department"
FORM = "form"
FUNCTION = "function"
VALUES = "values"
FILTER_NAME = "filter_name"
class LoggerConstants: class LoggerConstants:
......
...@@ -4,7 +4,8 @@ from sqlalchemy.pool import NullPool ...@@ -4,7 +4,8 @@ from sqlalchemy.pool import NullPool
from scripts.configurations import postgres_details from scripts.configurations import postgres_details
from scripts.core.logging.application_logging import logger from scripts.core.logging.application_logging import logger
from sqlalchemy_utils import create_database, database_exists from sqlalchemy_utils import create_database, database_exists
from scripts.core.schemas.postgres.postgres_tables import TrendsMasterTable from scripts.core.schemas.postgres.postgres_tables import TrendsMasterTable, \
TrendsFormTable
from scripts.core.exception.app_exceptions import ErrorMessages, GeneralException from scripts.core.exception.app_exceptions import ErrorMessages, GeneralException
...@@ -12,7 +13,6 @@ def database_init(): ...@@ -12,7 +13,6 @@ def database_init():
""" """
:Objective: To create default postgres tables :Objective: To create default postgres tables
""" """
database_obj = None
try: try:
# Creating engine for postgres # Creating engine for postgres
engine = create_engine(url=postgres_details.uri, poolclass=NullPool) engine = create_engine(url=postgres_details.uri, poolclass=NullPool)
...@@ -23,9 +23,11 @@ def database_init(): ...@@ -23,9 +23,11 @@ def database_init():
create_database(engine.url) create_database(engine.url)
TrendsMasterTable.__table__.create(bind=engine, checkfirst=True) TrendsMasterTable.__table__.create(bind=engine, checkfirst=True)
TrendsFormTable.__table__.create(bind=engine, checkfirst=True)
logger.info("Tables initiation successful") logger.info("Tables initiation successful")
TrendsMasterTable.index_name.create(bind=engine, checkfirst=True) # TrendsMasterTable.index_name.create(bind=engine, checkfirst=True)
# TrendsFormTable.index_name.create(bind=engine, checkfirst=True)
# Creating database object for CRUD operation # Creating database object for CRUD operation
database_obj = session_local() database_obj = session_local()
......
from sqlalchemy import desc, select, func from scripts.constants import Constants
from sqlalchemy import select, and_, cast, Text
def get_unique_departments(table):
    """Build a SELECT of all distinct departments in *table*.

    The department column is exposed under the ``data`` label so every
    metadata query returns rows with a uniform column name.
    """
    stmt = select(table.department.label(Constants.DATA))
    return stmt.distinct()
def get_unique_function(table, department):
    """Build a SELECT of the distinct functions belonging to *department*.

    The function column is exposed under the ``data`` label.
    """
    data_column = table.function.label(Constants.DATA)
    stmt = select(data_column).where(table.department == department)
    return stmt.distinct()
def get_unique_form(table, department, function):
    """Build a SELECT of the distinct forms for a department/function pair.

    The form column is exposed under the ``data`` label.
    """
    predicate = and_(
        table.department == department,
        table.function == function,
    )
    return select(table.form.label(Constants.DATA)).distinct().where(predicate)
def get_unique_filters(table, department, function, form):
    """Build a SELECT of the distinct filter values for a
    department/function/form combination.

    The JSON ``filter`` column is cast to text so DISTINCT can compare
    values; the result is exposed under the ``data`` label.
    """
    filter_as_text = cast(table.filter, Text).label(Constants.DATA)
    predicate = and_(
        table.department == department,
        table.function == function,
        table.form == form,
    )
    return select(filter_as_text).distinct().where(predicate)
...@@ -2,11 +2,15 @@ import copy ...@@ -2,11 +2,15 @@ import copy
from datetime import datetime from datetime import datetime
from scripts.constants import Constants from scripts.constants import Constants
from scripts.core.db.postgres import database_init from scripts.core.db.postgres import database_init
from scripts.core.db.postgres.psql_query import (
get_unique_departments, get_unique_function,
get_unique_form, get_unique_filters
)
from scripts.core.schemas.postgres import TableObject from scripts.core.schemas.postgres import TableObject
from scripts.core.logging.application_logging import logger from scripts.core.logging.application_logging import logger
from scripts.core.exception.app_exceptions import GeneralException from scripts.core.exception.app_exceptions import GeneralException
from scripts.core.schemas.postgres.postgres_tables import \ from scripts.core.schemas.postgres.postgres_tables import \
TrendsMasterTable TrendsMasterTable, TrendsFormTable
class TrendsDashboardHandler: class TrendsDashboardHandler:
...@@ -14,76 +18,75 @@ class TrendsDashboardHandler: ...@@ -14,76 +18,75 @@ class TrendsDashboardHandler:
Class responsible for creating tables and performing calculations for Class responsible for creating tables and performing calculations for
PepsiCo metrics. PepsiCo metrics.
""" """
def test(self):
print("hello World")
def get_max_score_data(self, db_init): def get_trends_metadata(self, request_data):
""" """
Method to get max scores for every year get_trends_metadata
:param db_init:
:return:
""" """
max_score_json = {} filter_flag = False
max_score_obj = TableObject( final_metadata_json = dict(
db=db_init, table_name=TrendsMasterTable values=[]
) )
# max_score_query = fetch_max_score_downday_query(
# table=max_score_obj.table,
# )
max_score_query = ""
response = max_score_obj.execute_query(query=max_score_query) logger.info("Database initialization")
if response: db_init = database_init()
for item in response:
max_score_json[item['year']] = {'max': str(item['max'])}
return max_score_json # Creating table object
trends_master_tbl_obj = TableObject(
db=db_init, table_name=TrendsMasterTable
)
def bulk_upsert(self, session, table, data_list, primary_key): if request_data.department and request_data.function and request_data.form:
""" filter_flag = True
Method to perform bulk upsert operation. filter_name = Constants.LINE
filter_query = get_unique_filters(
table=TrendsMasterTable,
department=request_data.department,
function=request_data.function,
form=request_data.form
)
Args: elif request_data.department and request_data.function:
session (Session): Database session. filter_name = Constants.FORM
table (Table): SQLAlchemy Table object. filter_query = get_unique_form(
data_list (list): List of data to be upsert. table=TrendsMasterTable,
primary_key (str): Primary key of the table. department=request_data.department,
function=request_data.function
)
Returns: elif request_data.department:
bool: True if upsert operation is successful, False otherwise. filter_name = Constants.FUNCTION
""" filter_query = get_unique_function(
try: table=TrendsMasterTable,
existing_ids = session.query(table.date).filter( department=request_data.department
table.date.in_( )
[item[primary_key] for item in data_list])).all()
existing_ids = [id_[0] for id_ in existing_ids]
# # Filter out existing IDs before inserting else:
# unique_items = [item for item in data_list if filter_name = Constants.DEPARTMENT
# item[primary_key] not in existing_ids] filter_query = get_unique_departments(
table=TrendsMasterTable
)
# Filter out existing IDs before inserting # Getting response from the Trends Master Table
# Updating the unique_items list to replace empty strings with None response_data = trends_master_tbl_obj.execute_query(
new_unique_items = [] query=filter_query
for item in data_list: )
new_item = {}
for key, value in item.items():
if value == '':
new_item[key] = None
else:
new_item[key] = value
if item[primary_key] not in existing_ids:
new_unique_items.append(new_item)
session.bulk_insert_mappings(table, new_unique_items) if response_data and not filter_flag:
final_metadata_json[Constants.FILTER_NAME] = filter_name
for each_metadata in response_data:
final_metadata_json[Constants.VALUES].append(
dict(
key=each_metadata[Constants.DATA],
label=each_metadata[Constants.DATA],
)
)
instances = [table(**item) for item in new_unique_items] return final_metadata_json
session.add_all(instances)
session.commit()
session.close()
return True def get_trends_data(self, request_data):
except Exception as e: """
session.rollback() Docstring
raise RuntimeError(f"Error during upsert operation: {e}") """
return True
from .trends_schema import ( from .trends_schema import (
TrendsAsyncInput, DashboardFilterInput,
TrendsAsyncOutput, DashboardFilterOutput,
) )
trends_async_request = TrendsAsyncInput trends_request = DashboardFilterInput
trends_async_response = TrendsAsyncOutput trends_response = DashboardFilterOutput
from __future__ import annotations from __future__ import annotations
import json
from datetime import datetime from datetime import datetime
from typing import Optional, Any from typing import Optional, Any
from pydantic import BaseModel from pydantic import BaseModel
import json
class TrendsAsyncInput(BaseModel): class DashboardFilterInput(BaseModel):
date: datetime department: Optional[str] = None
line: str function: Optional[str] = None
form: Optional[str] = None
class DashboardFilterOutput(BaseModel):
    """Response payload for the dashboard filter-metadata endpoint."""

    # Name of the filter level returned (handler sets department/function/
    # form/line constants here).
    filter_name: str
    # Dropdown entries; handler fills this with {key, label} dicts.
    values: list
class DashboardFormInput(BaseModel):
    """Request payload for the dashboard data endpoint.

    Every field is optional and defaults to ``None``.
    """

    # NOTE(review): dates arrive as plain strings — format not visible
    # here; confirm expected format against the caller.
    start_date: Optional[str] = None
    end_date: Optional[str] = None
    mapping_id: Optional[str] = None
    department: Optional[str] = None
    function: Optional[str] = None
    form: Optional[str] = None
    parameter: Optional[str] = None
class TrendsAsyncOutput(BaseModel): class DashboardFormOutput(BaseModel):
status: bool filter_name: str
message: str values: list
from sqlalchemy import VARCHAR, Column, Integer, Index, FLOAT, BOOLEAN from sqlalchemy import VARCHAR, Column, Integer, Index, FLOAT, BOOLEAN, TIMESTAMP, JSON
from datetime import datetime
from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base() Base = declarative_base()
class TrendsMasterTable(Base): class TrendsMasterTable(Base):
__tablename__ = "trends_master_tbl" __tablename__ = "trends_master_tbl_"
id = Column(Integer, primary_key=True, autoincrement=True) id = Column(Integer, primary_key=True, autoincrement=True)
mapping_id = Column(VARCHAR, nullable=True) mapping_id = Column(VARCHAR, nullable=True)
department = Column(VARCHAR, nullable=True) department = Column(VARCHAR, nullable=True)
sub_menu = Column(VARCHAR, nullable=True) function = Column(VARCHAR, nullable=True)
form_id = Column(VARCHAR, nullable=True) form = Column(VARCHAR, nullable=True)
line_id = Column(VARCHAR, nullable=True) filter = Column(JSON, nullable=True)
equipment_id = Column(VARCHAR, nullable=True)
parameter = Column(VARCHAR, nullable=True) parameter = Column(VARCHAR, nullable=True)
trend_captured = Column(BOOLEAN, nullable=True) trend_captured = Column(BOOLEAN, nullable=True)
min = Column(FLOAT, nullable=True) lower_limit = Column(FLOAT, nullable=True)
max = Column(FLOAT, nullable=True) upper_value = Column(FLOAT, nullable=True)
actual_value = Column(FLOAT, nullable=True)
recheck_value = Column(FLOAT, nullable=True)
index_name = Index('trends_master_indx', mapping_id, department, # index_name = Index("trends_master_tbl_", mapping_id, department,
sub_menu, form_id, line_id, equipment_id, # function, form, filter, parameter)
parameter)
class TrendsFormTable(Base): class TrendsFormTable(Base):
__tablename__ = "trends_form_tbl" __tablename__ = "trends_form_tbl_"
id = Column(Integer, primary_key=True, autoincrement=True) id = Column(Integer, primary_key=True, autoincrement=True)
mapping_id = Column(VARCHAR, nullable=True) mapping_id = Column(VARCHAR, nullable=True)
time_stamp = Column(TIMESTAMP)
value = Column(VARCHAR, nullable=True) value = Column(VARCHAR, nullable=True)
time = Column(datetime, nullable=True)
index_name = Index('trends_form_indx', mapping_id, time) # index_name = Index("trends_form_tbl_", mapping_id, time_stamp)
...@@ -10,12 +10,14 @@ Usage: ...@@ -10,12 +10,14 @@ Usage:
from scripts.core.services.event_service import router from scripts.core.services.event_service import router
""" """
import json
from fastapi import APIRouter from fastapi import APIRouter
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from scripts.constants import Constants, APIConstants from scripts.constants import Constants, APIConstants
from scripts.core.logging.application_logging import logger from scripts.core.logging.application_logging import logger
from scripts.core.schemas.api import ( from scripts.core.schemas.api import (
trends_async_request, trends_async_response trends_request, trends_response
) )
from scripts.core.handler.trends_handler import TrendsDashboardHandler from scripts.core.handler.trends_handler import TrendsDashboardHandler
...@@ -38,28 +40,48 @@ async def ping(): ...@@ -38,28 +40,48 @@ async def ping():
return dict(status=200) return dict(status=200)
@downday_router.post(APIConstants.DASHBOARD_METADATA_ENDPOINT,
                     response_model=trends_response)
async def dashboard_metadata(
    request_data: trends_request
):
    """
    Fetch dashboard filter metadata for the supplied filter selection.

    Args:
        request_data (DashboardFilterInput): Request body carrying the
            currently selected department/function/form filters (all
            optional).

    Returns:
        The filter metadata produced by
        ``TrendsDashboardHandler.get_trends_metadata``, or a
        ``JSONResponse`` with status 500 and the error text when the
        handler raises.
    """
    try:
        trends_obj = TrendsDashboardHandler()
        return trends_obj.get_trends_metadata(request_data=request_data)
    except Exception as err:
        # Service boundary: log full traceback and surface a 500 to the
        # client instead of propagating.
        logger.exception(
            Constants.EXCEPTION_RAISER.format(str(err)),
            exc_info=True,
        )
        return JSONResponse(status_code=500, content=str(err))
@downday_router.post(APIConstants.DASHBOARD_DATA_ENDPOINT,
response_model=trends_response)
async def dashboard_data(
request_data: trends_request
): ):
""" """
Initiate postgres db and create tables Initiate postgres db and create tables
Args: Args:
content (TrendsAsyncInput): Request body containing the request_data (DashboardFilterInput): Request body containing the
necessary form parameters. necessary form parameters.
Returns: Returns:
:param request_data:
""" """
try: try:
downday_obj = TrendsDashboardHandler() trends_obj = TrendsDashboardHandler()
status, data = downday_obj.test() return trends_obj.get_trends_data(request_data=request_data)
if status:
return dict(status=True, message=data)
else:
return dict(status=False, message=data)
except Exception as err: except Exception as err:
logger.exception( logger.exception(
Constants.EXCEPTION_RAISER.format(str(err)), Constants.EXCEPTION_RAISER.format(str(err)),
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment