suryakant / sterlite_custom_reports

Commit 72f96be0, authored Oct 04, 2023 by suryakant
Parent: b60bfd58

    Sterlite Custom Report Updates

Showing 8 changed files with 726 additions and 25 deletions (+726 -25)
Changed files:

    scripts/constants/__init__.py                      +13   -4
    scripts/core/db/postgres/custom_report_query.py    +225  -4
    scripts/core/handler/event_handler.py              +129  -3
    scripts/core/schemas/api/custom_report_model.py    +7    -2
    scripts/core/services/event_service.py             +11   -9
    scripts/core/utilities/postgresql_db_utils.py      +5    -3
    scripts/template/__init__.py                       +0    -0
    scripts/template/sterlite_report_template.py       +336  -0
scripts/constants/__init__.py  (+13 -4)

@@ -23,7 +23,7 @@ class APIConstants:
     SHUTDOWN = "shutdown"
     HEALTH_CHECK = "/healthcheck"
-    INIT_DB_ENDPOINT = "/"
+    CUSTOM_REPORT_ENDPOINT = "/custom_report"


 class CommonConstants:
@@ -32,15 +32,24 @@ class CommonConstants:
     """
     GET = "GET"
     POST = "POST"
-    EVENT_HANDLING_ENDPOINT = "Event Handler Endpoints"
+    CUSTOM_REPORT_TAG = "Event Handler Endpoints"
     EXCEPTION_RAISER = "Exception ->{}"
     DEV_KEY = "dev"
+    DATE_TIME_FORMAT = "%Y-%m-%d"
+    QUERY = "query"
+    DAY_START_DATE = "day_start_date"
+    DAY_END_DATE = "day_end_date"
+    MONTH_START_DATE = "month_start_date"
+    MONTH_END_DATE = "month_end_date"
+    YEAR_START_DATE = "year_start_date"
+    YEAR_END_DATE = "year_end_date"


-class PostgresConstants:
+class ReportType:
     """
-    Constants related to PostgreSQL database
+    Constants related to ReportType
     """
+    REFINERY_REPORT = "refinery_report"


 figlet = """
scripts/core/db/postgres/custom_report_query.py  (+225 -4)

Removed (the entire previous file):

class CustomReportQuery:
    """
    """
    ANODE_AVAILABILITY_QUERY_1 = ""

Added in its place:

class SterliteRefineryQuery:
    """
    Refinery report queries
    """

    class AnodeAvailability:
        # QUERY FOR ANODE_AVAILABILITY, UOM, NORMS, ON_DATE
        QUERY_AA = """
            SELECT 'Cell House Anode Availability' AS ANODE_AVAILABILITY,
                   '%' AS UOM,
                   AVG(ANODE.AA_PLANNED) AS NORMS,
                   (SUM(ANODE.NUMERATOR) / NULLIF(SUM(DENOMINATOR), 0)) * 100 AS ON_DATE
            FROM
                (SELECT ACTUAL.AA_NUMO AS NUMERATOR,
                        ACTUAL.AA_DENO AS DENOMINATOR,
                        PLAN.AA_PLANNED,
                        COALESCE(ACTUAL.DATE, PLAN.DATE) AS date
                 FROM SEMANTIC_PROD.REFINERY_ANODE_AVAILABILITY_ACTUAL_VIEW ACTUAL
                 FULL JOIN
                     (SELECT DD_1.DATE_DT AS date,
                             M_PLAN.AA_PLANNED
                      FROM
                          (SELECT DATE(date) AS POSTING_DATE,
                                  ANODE_AVAILABILITY_VALUE::numeric AS AA_PLANNED
                           FROM SEMANTIC_DEV.ANODE_AVAILABILITY
                           WHERE LOWER(TRIM(ANODE_AVAILABILITY_SELECT)) = 'anode availability'
                             AND DATE_PART('day', date) = 1
                           GROUP BY 1, 2) M_PLAN
                      LEFT JOIN
                          (SELECT DIM_DATE.DATE_DT,
                                  DIM_DATE.MONTH_OF_YR_NUM AS MNTH,
                                  DIM_DATE.YR_NAME AS YR
                           FROM SEMANTIC_PROD.DIM_DATE) DD_1
                        ON DD_1.MNTH = DATE_PART('month', M_PLAN.POSTING_DATE)
                       AND DD_1.YR = DATE_PART('year', M_PLAN.POSTING_DATE)) PLAN
                   ON ACTUAL.DATE = PLAN.DATE) ANODE
            WHERE DATE BETWEEN '{day_start_date}' AND '{day_end_date}'
            GROUP BY 1, 2;
        """

        QUERY_MTD = """
            SELECT 'Cell House Anode Availability' AS ANODE_AVAILABILITY,
                   '%' AS UOM,
                   (SUM(ANODE.NUMERATOR) / NULLIF(SUM(DENOMINATOR), 0)) * 100 AS MTD
            FROM
                (SELECT ACTUAL.AA_NUMO AS NUMERATOR,
                        ACTUAL.AA_DENO AS DENOMINATOR,
                        PLAN.AA_PLANNED,
                        COALESCE(ACTUAL.DATE, PLAN.DATE) AS date
                 FROM SEMANTIC_PROD.REFINERY_ANODE_AVAILABILITY_ACTUAL_VIEW ACTUAL
                 FULL JOIN
                     (SELECT DD_1.DATE_DT AS date,
                             M_PLAN.AA_PLANNED
                      FROM
                          (SELECT DATE(date) AS POSTING_DATE,
                                  ANODE_AVAILABILITY_VALUE::numeric AS AA_PLANNED
                           FROM SEMANTIC_DEV.ANODE_AVAILABILITY
                           WHERE LOWER(TRIM(ANODE_AVAILABILITY_SELECT)) = 'anode availability'
                             AND DATE_PART('day', date) = 1
                           GROUP BY 1, 2) M_PLAN
                      LEFT JOIN
                          (SELECT DIM_DATE.DATE_DT,
                                  DIM_DATE.MONTH_OF_YR_NUM AS MNTH,
                                  DIM_DATE.YR_NAME AS YR
                           FROM SEMANTIC_PROD.DIM_DATE) DD_1
                        ON DD_1.MNTH = DATE_PART('month', M_PLAN.POSTING_DATE)
                       AND DD_1.YR = DATE_PART('year', M_PLAN.POSTING_DATE)) PLAN
                   ON ACTUAL.DATE = PLAN.DATE) ANODE
            WHERE DATE BETWEEN '{month_start_date}' AND '{month_end_date}'
            GROUP BY 1, 2;
        """

        QUERY_YTD = """
            SELECT 'Cell House Anode Availability' AS ANODE_AVAILABILITY,
                   '%' AS UOM,
                   (SUM(ANODE.NUMERATOR) / NULLIF(SUM(DENOMINATOR), 0)) * 100 AS YTD
            FROM
                (SELECT ACTUAL.AA_NUMO AS NUMERATOR,
                        ACTUAL.AA_DENO AS DENOMINATOR,
                        PLAN.AA_PLANNED,
                        COALESCE(ACTUAL.DATE, PLAN.DATE) AS date
                 FROM SEMANTIC_PROD.REFINERY_ANODE_AVAILABILITY_ACTUAL_VIEW ACTUAL
                 FULL JOIN
                     (SELECT DD_1.DATE_DT AS date,
                             M_PLAN.AA_PLANNED
                      FROM
                          (SELECT DATE(date) AS POSTING_DATE,
                                  ANODE_AVAILABILITY_VALUE::numeric AS AA_PLANNED
                           FROM SEMANTIC_DEV.ANODE_AVAILABILITY
                           WHERE LOWER(TRIM(ANODE_AVAILABILITY_SELECT)) = 'anode availability'
                             AND DATE_PART('day', date) = 1
                           GROUP BY 1, 2) M_PLAN
                      LEFT JOIN
                          (SELECT DIM_DATE.DATE_DT,
                                  DIM_DATE.MONTH_OF_YR_NUM AS MNTH,
                                  DIM_DATE.YR_NAME AS YR
                           FROM SEMANTIC_PROD.DIM_DATE) DD_1
                        ON DD_1.MNTH = DATE_PART('month', M_PLAN.POSTING_DATE)
                       AND DD_1.YR = DATE_PART('year', M_PLAN.POSTING_DATE)) PLAN
                   ON ACTUAL.DATE = PLAN.DATE) ANODE
            WHERE DATE BETWEEN '{year_start_date}' AND '{year_end_date}'
            GROUP BY 1, 2;
        """

    class DOCellsInOperation:
        QUERY_AA = """
            SELECT 'DO Cells In Operation' AS ANODE_AVAILABILITY,
                   'No.' AS UOM,
                   MAX(CASE WHEN LOWER(TRIM(KPI)) = 'liberator_cells_plan' THEN METRIC_VALUE END) AS NORMS,
                   MAX(CASE WHEN LOWER(TRIM(KPI)) = 'liberator_cells_actual' THEN METRIC_VALUE END) AS ON_DATE,
                   MAX(CASE WHEN LOWER(TRIM(KPI)) = 'liberator_cells_actual' THEN METRIC_VALUE END) AS MTD,
                   MAX(CASE WHEN LOWER(TRIM(KPI)) = 'liberator_cells_actual' THEN METRIC_VALUE END) AS YTD
            FROM SEMANTIC_PROD.BANKS_CELLS_LIBERATORS_VIEW
            GROUP BY 1, 2
        """

    class TotalCellsInOperation:
        QUERY_1 = """
            SELECT 'Total Cells In Operation' AS ANODE_AVAILABILITY,
                   'No.' AS UOM,
                   MAX(NORMS) AS NORMS,
                   MAX(METRIC_VALUE) AS ON_DATE
            FROM
                (SELECT KPI,
                        CASE WHEN LOWER(TRIM(KPI)) = 'operational_cells_plan'
                             THEN ROUND(AVG(METRIC_VALUE)) ELSE NULL END AS NORMS,
                        CASE WHEN LOWER(TRIM(KPI)) = 'operational_cells_actual'
                             THEN ROUND(AVG(METRIC_VALUE)) ELSE NULL END AS METRIC_VALUE
                 FROM SEMANTIC_PROD.REFINERY_OPERATIONAL_CELLS_VIEW
                 WHERE LOWER(TRIM(KPI)) in ('operational_cells_actual', 'operational_cells_plan')
                   AND (date BETWEEN '{day_start_date}' AND '{day_end_date}')
                 GROUP BY 1) OP_CELLS
            GROUP BY 1, 2
        """

        QUERY_2 = """
            SELECT 'Total Cells In Operation' AS ANODE_AVAILABILITY,
                   'No.' AS UOM,
                   MAX(METRIC_VALUE) AS MTD
            FROM
                (SELECT KPI,
                        CASE WHEN LOWER(TRIM(KPI)) = 'operational_cells_plan'
                             THEN ROUND(AVG(METRIC_VALUE)) ELSE NULL END AS NORMS,
                        CASE WHEN LOWER(TRIM(KPI)) = 'operational_cells_actual'
                             THEN ROUND(AVG(METRIC_VALUE)) ELSE NULL END AS METRIC_VALUE
                 FROM SEMANTIC_PROD.REFINERY_OPERATIONAL_CELLS_VIEW
                 WHERE LOWER(TRIM(KPI)) in ('operational_cells_actual', 'operational_cells_plan')
                   AND (date BETWEEN '{month_start_date}' AND '{month_end_date}')
                 GROUP BY 1) OP_CELLS
            GROUP BY 1, 2
        """

        QUERY_3 = """
            SELECT 'Total Cells In Operation' AS ANODE_AVAILABILITY,
                   'No.' AS UOM,
                   MAX(METRIC_VALUE) AS YTD
            FROM
                (SELECT KPI,
                        CASE WHEN LOWER(TRIM(KPI)) = 'operational_cells_plan'
                             THEN ROUND(AVG(METRIC_VALUE)) ELSE NULL END AS NORMS,
                        CASE WHEN LOWER(TRIM(KPI)) = 'operational_cells_actual'
                             THEN ROUND(AVG(METRIC_VALUE)) ELSE NULL END AS METRIC_VALUE
                 FROM SEMANTIC_PROD.REFINERY_OPERATIONAL_CELLS_VIEW
                 WHERE LOWER(TRIM(KPI)) in ('operational_cells_actual', 'operational_cells_plan')
                   AND (date BETWEEN '{year_start_date}' AND '{year_end_date}')
                 GROUP BY 1) OP_CELLS
            GROUP BY 1, 2
        """
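For reference, a minimal sketch (not part of the commit) of how these class-level query strings are meant to be used: the {day_start_date}-style placeholders are plain str.format fields, which CustomReportHandler.get_queries_from_db fills from the date filter before handing the result to PostgresDBUtility.fetch_data. The literal dates below are invented example values.

# Sketch only: substitute the date placeholders the same way the handler does.
from scripts.core.db.postgres.custom_report_query import SterliteRefineryQuery

query = SterliteRefineryQuery.AnodeAvailability.QUERY_AA.format(
    day_start_date="2023-10-01",  # example value, not from the repo
    day_end_date="2023-10-04",    # example value, not from the repo
)
# The formatted string is then passed to PostgresDBUtility.fetch_data(query=query).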
scripts/core/handler/event_handler.py  (+129 -3)

Imports and class rename:

-from scripts.constants import PostgresConstants, CommonConstants
-from scripts.configurations import postgres_details
+import pandas as pd
+from datetime import datetime
+from scripts.constants import ReportType, CommonConstants
+from scripts.template.sterlite_report_template import SterliteRefineryTemplate
 from scripts.core.logging.application_logging import logger
+from scripts.core.exception.app_exceptions import GeneralException
 from scripts.core.utilities.postgresql_db_utils import PostgresDBUtility


-class CustomReport:
+class CustomReportHandler:
     def __init__(self):
         self.postgres_db_obj = PostgresDBUtility()

Methods added to the handler:

    def create_custom_date_filter(self, input_json):
        """
        This method convert start date and end date to a date range.
        :param input_json:
        :return:
        """
        date_range_list = []
        # Start date
        start_date = datetime.strptime(input_json["property"]["start_date"],
                                       CommonConstants.DATE_TIME_FORMAT)
        # End date
        end_date = datetime.strptime(input_json["property"]["end_date"],
                                     CommonConstants.DATE_TIME_FORMAT)
        logger.info(f"Creating list of dates starting from {start_date} to {end_date}")
        date_list = pd.date_range(start_date, end_date, freq='D')
        # Iterating and creating where clause filters
        # Output - {'day_start_date': 'YYYY-MM-DD', 'day_end_date': 'YYYY-MM-DD',
        #           'month_start_date': 'YYYY-MM-DD', 'month_end_date': 'YYYY-MM-DD',
        #           'year_start_date': 'YYYY-MM-DD', 'year_end_date': 'YYYY-MM-DD'}
        for each_dates in date_list.strftime(CommonConstants.DATE_TIME_FORMAT).to_list():
            # To get the financial year
            financial_year = None
            date_obj = datetime.strptime(each_dates, CommonConstants.DATE_TIME_FORMAT)
            if date_obj.month >= 4:
                financial_year = str(date_obj.year)
            elif date_obj.month < 4:
                financial_year = str(date_obj.year - 1)
            date_range_list.append(
                dict(
                    day_start_date=each_dates,
                    day_end_date=each_dates,
                    month_start_date=each_dates[:-2] + "01",
                    month_end_date=each_dates,
                    year_start_date=financial_year + "-04-01",
                    year_end_date=each_dates,
                )
            )
        return date_range_list

    def get_queries_from_db(self, input_json, date_filter):
        """
        :param input_json:
        :param date_filter:
        :return:
        """
        for each_blocks in input_json:
            # Iterating each blocks for fetching query
            print(each_blocks)
            if input_json[each_blocks][CommonConstants.QUERY]:
                for each_kpi in input_json[each_blocks][CommonConstants.QUERY]:
                    temp_data_dict = dict()
                    # Iterating each query for each KPI
                    for each_query in input_json[each_blocks][CommonConstants.QUERY][each_kpi]:
                        query = each_query.format(
                            day_start_date=date_filter[CommonConstants.DAY_START_DATE],
                            day_end_date=date_filter[CommonConstants.DAY_END_DATE],
                            month_start_date=date_filter[CommonConstants.MONTH_START_DATE],
                            month_end_date=date_filter[CommonConstants.MONTH_END_DATE],
                            year_start_date=date_filter[CommonConstants.YEAR_START_DATE],
                            year_end_date=date_filter[CommonConstants.YEAR_END_DATE]
                        )
                        response = self.postgres_db_obj.fetch_data(query=query)
                        if response:
                            temp_data_dict.update(dict(response[0]))
                    if not temp_data_dict:
                        # Creating null values if no data
                        for each_columns in input_json[each_blocks]["data_column"]:
                            temp_data_dict.update({each_columns: None})
                    input_json[each_blocks]["data"].append(temp_data_dict)
            else:
                temp_data_dict = dict()
                for each_columns in input_json[each_blocks]["data_column"]:
                    temp_data_dict.update({each_columns: None})
                input_json[each_blocks]["data"].append(temp_data_dict)
        return input_json

    def custom_report_handler(self, input_json):
        """
        :param input_json:
        :return:
        """
        status = False
        message = "Error generating a message"
        data = "Data"
        try:
            # if str(input_json.job_type).lower() == ReportType.REFINERY_REPORT:
            if str(input_json["job_type"]).lower() == ReportType.REFINERY_REPORT:
                date_filter = self.create_custom_date_filter(input_json=input_json)
                for each_date_range in date_filter:
                    # Iterating over sterlite json file
                    for each_blocks in SterliteRefineryTemplate.REPORT_TEMPLATE:
                        # Getting the data from queries
                        each_blocks = self.get_queries_from_db(input_json=each_blocks,
                                                               date_filter=each_date_range)
                        # print(each_blocks)
                    print("=========================================")
                    break
        except GeneralException as err:
            logger.error(f"Exception in custom_report_handler: {err}")
        return status, message, data
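A quick illustration of the date logic above (not part of the commit; the dates are invented, and it assumes PostgresDBUtility() can be constructed in your environment, since the handler's __init__ creates one):

# Sketch: what create_custom_date_filter returns for a range that crosses April 1,
# the financial-year boundary used in the method above.
handler = CustomReportHandler()
filters = handler.create_custom_date_filter(
    {"property": {"start_date": "2023-03-31", "end_date": "2023-04-01"}}
)
# filters[0] == {"day_start_date": "2023-03-31", "day_end_date": "2023-03-31",
#                "month_start_date": "2023-03-01", "month_end_date": "2023-03-31",
#                "year_start_date": "2022-04-01", "year_end_date": "2023-03-31"}
# filters[1] uses "2023-04-01" throughout, with "year_start_date": "2023-04-01".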
scripts/core/schemas/api/custom_report_model.py  (+7 -2)

@@ -4,8 +4,13 @@ from pydantic import BaseModel
 class ReportInput(BaseModel):
-    from_date: Optional[str]
-    end_date: Optional[str]
+    job_id: Optional[str]
+    user_id: Optional[str]
+    report: Optional[dict]
+    property: Optional[dict]
+    job_type: Optional[str]
+    tz: Optional[str]
+    file_name: Optional[str]


 class ReportOutput(BaseModel):
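For illustration only (field values are invented), the updated ReportInput now carries the job metadata the handler reads, with the report window nested inside property:

# Sketch: every field is Optional, so only what the refinery report path needs is set here.
report_request = ReportInput(
    job_type="refinery_report",
    property={"start_date": "2023-10-01", "end_date": "2023-10-04"},
    user_id="example-user",        # placeholder value
    file_name="refinery_report",   # placeholder value
)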
scripts/core/services/event_service.py  (+11 -9)

@@ -12,7 +12,7 @@ Usage:
 """
 from fastapi import APIRouter
 from scripts.configurations import service_details
-from scripts.core.handler.event_handler import CustomReport
+from scripts.core.handler.event_handler import CustomReportHandler
 from scripts.core.logging.application_logging import logger
 from scripts.constants import APIConstants, CommonConstants, figlet
 from scripts.core.schemas.api import (
@@ -20,8 +20,8 @@ from scripts.core.schemas.api import (
     custom_report_output_model
 )

-event_handler_obj = CustomReport()
-event_router = APIRouter(tags=[CommonConstants.EVENT_HANDLING_ENDPOINT])
+report_handler_obj = CustomReportHandler()
+event_router = APIRouter(tags=[CommonConstants.CUSTOM_REPORT_TAG])


 @event_router.on_event(APIConstants.STARTUP)
@@ -50,23 +50,25 @@ async def ping():
 @event_router.post(
-    APIConstants.INIT_DB_ENDPOINT, response_model=custom_report_output_model)
-async def initialize_db(input_json: custom_report_input_model):
+    APIConstants.CUSTOM_REPORT_ENDPOINT, response_model=custom_report_output_model)
+async def custom_report_function(input_json: custom_report_input_model):
     """
     Initiate postgres db and create tables
     Args:
-        content (InitDbInput): Request body containing the necessary parameters.
+        content (ReportInput): Request body containing the necessary parameters.
     Returns:
     :param :
+        input_json
     """
     try:
-        return {"status": True, "message": ""}
+        status, message, data = report_handler_obj.custom_report_handler(input_json=input_json)
+        return {"status": status, "message": message, "data": data}
     except Exception as err:
         logger.exception(
             CommonConstants.EXCEPTION_RAISER.format(str(err)),
             exc_info=service_details.exception_trace,
         )
+        return {"status": False, "message": str(err)}
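A hedged example of exercising the renamed route (not from the repo: host, port, and payload values are assumptions; the path comes from APIConstants.CUSTOM_REPORT_ENDPOINT = "/custom_report"):

# Sketch: POST a report request to the new endpoint and read the
# {"status", "message", "data"} envelope returned above.
import requests

payload = {
    "job_type": "refinery_report",
    "property": {"start_date": "2023-10-01", "end_date": "2023-10-04"},
}
response = requests.post("http://localhost:8080/custom_report", json=payload)  # host/port assumed
print(response.json())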
scripts/core/utilities/postgresql_db_utils.py  (+5 -3)

@@ -34,7 +34,8 @@ class PostgresDBUtility:
         This method is used for selecting records from tables.
         :param query: The select query to be executed
         :param db: Session
-        :return: status: The status True on success and False on failure and the list of rows
+        :return: status: The status True on success and False on failure and
+                 the list of rows
         """
         logger.debug(f" SQL QUERY {query}")
         connection = None
@@ -60,7 +61,8 @@ class PostgresDBUtility:
         """
         This method is used for selecting records from tables.
         :param query: The select query to be executed
-        :return: status: The status True on success and False on failure and the list of rows
+        :return: status: The status True on success and False on failure and
+                 the list of rows
         """
         connection = None
         result = ""
@@ -244,7 +246,7 @@ class PostgresDBUtility:
         result = []
         try:
             connection = self.create_connection()
-            cursor = connection.cursor()
+            cursor = connection.cursor(cursor_factory=self.cursor_type)
             cursor.execute(query)
             result = cursor.fetchall()
         except Exception as e:
scripts/template/__init__.py  (new file, empty, +0 -0)
scripts/template/sterlite_report_template.py  (new file, +336 -0)

from scripts.core.db.postgres.custom_report_query import SterliteRefineryQuery


class SterliteRefineryTemplate:
    REPORT_TEMPLATE = [
        {
            "ANODE AVAILABILITY": {
                "columns": ["ANODE AVAILABILITY", "UOM", "NORMS (Month)", "ON DATE(Day)", "MTD", "YTD"],
                "query": {
                    "ANODE_AVAILABILITY": [
                        SterliteRefineryQuery.AnodeAvailability.QUERY_AA,
                        SterliteRefineryQuery.AnodeAvailability.QUERY_MTD,
                        SterliteRefineryQuery.AnodeAvailability.QUERY_YTD,
                    ],
                    "DO CELLS IN OPERATION": [SterliteRefineryQuery.DOCellsInOperation.QUERY_AA],
                    "Total Cells In Operation": [
                        SterliteRefineryQuery.TotalCellsInOperation.QUERY_1,
                        SterliteRefineryQuery.TotalCellsInOperation.QUERY_2,
                        SterliteRefineryQuery.TotalCellsInOperation.QUERY_3,
                    ],
                },
                "data": [],
                "data_column": ["anode_availability", "uom", "norms", "on_date", "mtd", "ytd"],
                "description": "",
                "format": "",
            },
            "SAFETY REPORT": {
                "columns": ["SAFETY REPORT", "UOM", "NORMS", "ON DATE", "MTD", "YTD"],
                "query": [],
                "data": [],
                "data_column": ["safety_report", "uom", "norms", "on_date", "mtd", "ytd"],
                "additions": [],
                "description": "",
                "format": "",
            },
        },
        {
            "POWER AVAILABILITY": {
                "columns": ["POWER AVAILABILITY", "UOM", "NORMS", "ON DATE", "MTD", "YTD"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
            "RM ANODE RECEIPT DETAILS": {
                "columns": ["RM ANODE RECEIPT DETAILS", "ON DATE", "MTD", "YTD", "Material", "OPENING STOCK"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
        {
            "EFFICIENCIES": {
                "columns": ["EFFICIENCIES", "UOM", "NORMS", "ON DATE", "MTD", "YTD"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
            "BANK / CROP": {
                "columns": ["BANK / CROP", "CE (%)", "THEOR. WEIGHT", "ACTUAL WEIGHT", "STRIPPING TIME"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
        {
            "PRODUCTION": {
                "columns": ["PRODUCTION", "UOM", "NORMS", "ON DATE", "MTD", "YTD"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
            "CIRCULATION": {
                "columns": ["CIRCULATION", "CE %", "THEOR. WEIGHT", "ACTUAL WEIGHT"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
        {
            "FG INVENTORY": {
                "columns": ["FG INVENTORY", "ON DATE", "MTD", "YTD"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
            "PRODUCTION": {
                "columns": ["PRODUCTION", "ON DATE", "MTD", "YTD"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
        {
            "CROP": {
                "columns": ["CROP", "BANKS", "TOTAL CELLS", "CELL VOLTAGE (V)", "T. SHORTS / CELL / CHECK"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
        {
            "STRIPPING TIME": {
                "columns": ["STRIPPING TIME", "UOM", "1st CROP - Sttripping time",
                            "2nd CROP - Change over time", "3rd CROP", "CSM Plate Rejection"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
        {
            "FILTER AVAILABILITY": {
                "columns": ["FILTER AVAILABILITY", "ON DATE", "MTD", "Filtered Volume",
                            "Total Volume Refinery", "ON DATE"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
        {
            "SPECIFIC ENERGY CONSUMPTION": {
                "columns": ["UTILITIES CONSUMPTIONS", "UOM", "NORMS", "ON DATE", "MTD", "YTD"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
            "SPECIFIC CONSUMPTION": {
                "columns": ["CONSUMABLES", "UOM", "NORMS", "ON DATE", "MTD", "YTD"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
        {
            "ELECTROLYTE COMPOSITION": {
                "columns": ["ELECTROLYTE COMPOSITION", "UOM", "NORMS", "DATE", "CIR-1", "CIR-2"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
            "SUSPENDED SOLIDS": {
                "columns": ["SUSPENDED SOLIDS"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
        {
            "BLEEDING SECTION": {
                "columns": ["BLEEDING SECTION", "UOM", "ON DATE", "MTD", "YTD"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
            "BANK / CROP": {
                "columns": ["BANK / CROP", "TIME", "WEIGHT (MT)", "CELLS", "READING"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
        {
            "DEPARTMENT(M4)": {
                "columns": ["DEPARTMENT(M4)", "EQUIPMENT DETAILS", "EQUIPMENT LOCATION", "DURATION",
                            "(MT) PRODUCTION", "CAUSE OF THE BREAK DOWN"],
                "query": [], "data": [], "additions": [], "description": "", "format": "",
            },
        },
    ]
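For orientation (not part of the commit, numbers are invented): each block in REPORT_TEMPLATE is keyed by section name and accumulates result rows in its "data" list as CustomReportHandler.get_queries_from_db runs, with each row keyed by the block's "data_column" names.

# Sketch: the shape of one appended row for the first template block.
block = SterliteRefineryTemplate.REPORT_TEMPLATE[0]["ANODE AVAILABILITY"]
block["data"].append({
    "anode_availability": "Cell House Anode Availability",
    "uom": "%",
    "norms": 92.0,    # example value
    "on_date": 90.5,  # example value
    "mtd": None,
    "ytd": None,
})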