Commit 6bc89d6c authored by aakash.bedi

test

parent 33e97bf0
# Local test driver: builds the 3CP yield (DPR) report for a hard-coded
# one-day window and writes a formatted Excel workbook.
if __name__ == "__main__":
    # Local runs pull environment variables (e.g. KAIROS_URI) from config.env.
    from dotenv import load_dotenv
    load_dotenv(dotenv_path='config.env')
import pandas as pd
import xlsxwriter
import os
import yaml
from datetime import datetime
import pytz
from loguru import logger
from scripts.utils.yield_sheet_3cp_utils.all_tags_3cp import AllTags
from scripts.utils.yield_sheet_3cp_utils.report_generator_3cp import ReportGenerator

# Original (platform-driven) date-range plumbing, disabled for this test run:
# __kwargs__.update(
#     start_date=input_data.property.get(
#         "start_date",
#         datetime.now(tz=pytz.timezone(input_data.tz)).strftime("%Y-%m-%d"),
#     ),
#     end_date=input_data.property.get(
#         "end_date",
#         datetime.now(tz=pytz.timezone(input_data.tz)).strftime("%Y-%m-%d"))
# )
# try:
#     logger.info("Calculating Report for 3CP")
#     start_date = str(__kwargs__.get('start_date'))
#     end_date = str(__kwargs__.get('end_date'))

# Folder holding the engine (tag hierarchy) configuration.
yml_file_path = "scripts/utils/yield_sheet_3cp_utils/"
try:
    with open(os.path.join(yml_file_path, 'engine.yml'), 'r') as engine_yml_file:
        config_engine = yaml.full_load(engine_yml_file)
except Exception as e:
    # NOTE(review): only logs — `config_engine` is undefined below if the load fails.
    logger.exception(f"Exception occurred - {e}", exc_info=True)
# Key spelling intentionally matches engine.yml ("tag_heirarcy").
all_tags = config_engine["tag_heirarcy"]
# Split the hierarchy into calculated / calculated-previous / manual / DCS tag maps.
tags_cal, tags_cal_prev, tags_manual, tags_dcs = AllTags().get_tags(all_tags_dictionary=all_tags)
# Hard-coded one-day window for this test run; day boundary is 05:00.
# start_date = datetime.strptime(start_date, '%Y-%m-%d')
# start_date = start_date.astimezone(pytz.UTC)
start_date = datetime.strptime("2022-12-25", '%Y-%m-%d').replace(hour=5, minute=0, second=0,
                                                                 microsecond=0)
start_date = start_date.astimezone(pytz.timezone("Asia/Kolkata")).replace(hour=5, minute=0, second=0,
                                                                          microsecond=0)
# end_date = datetime.strptime(end_date, '%Y-%m-%d')
# end_date = end_date.astimezone(pytz.UTC)
end_date = datetime.strptime("2022-12-26", '%Y-%m-%d').replace(hour=5, minute=0, second=0,
                                                               microsecond=0)
end_date = end_date.astimezone(pytz.timezone("Asia/Kolkata")).replace(hour=5, minute=0, second=0,
                                                                      microsecond=0)
# Build the report dataframe for the window.
df, message = ReportGenerator(tags_cal=tags_cal, tags_cal_prev=tags_cal_prev, tags_manual=tags_manual,
                              tags_dcs=tags_dcs, start_date=start_date,
                              end_date=end_date).yield_report_3cp()
logger.debug(f'{df.shape}')
logger.debug(f'{message}')

# ---- Excel export -------------------------------------------------------
master_output_file = 'test_prod.xlsx'
writer = pd.ExcelWriter(master_output_file, engine='xlsxwriter')
df.to_excel(writer, sheet_name="DPR Sheet", index=True)
workbook = writer.book
# NOTE(review): `format` shadows the builtin of the same name.
format = workbook.add_format(
    {'font_name': 'Trebuchet MS', 'text_wrap': True, 'bold': 2, 'font_color': "blue"})
format.set_align('center')
format.set_align('vcenter')
format1 = workbook.add_format({'font_name': 'Trebuchet MS', 'text_wrap': True})
format1.set_align('center')
format1.set_align('vcenter')
header_footer_format = workbook.add_format({
    'text_wrap': True
})
no_of_rows = df.shape[0]
worksheet = writer.sheets["DPR Sheet"]
# set the column width as per your requirement
# NOTE(review): later set_column calls on overlapping ranges (e.g. T:T, AD:AD,
# AY:AY and the two BA:BA lines) override the earlier widths — the last call wins.
worksheet.set_column('A:F', 15, format)
worksheet.set_column('G:L', 20, format)
worksheet.set_column('N:T', 22, format)
worksheet.set_column('U:Z', 22, format)
worksheet.set_column('AB:AD', 20, format)
worksheet.set_column('AE:AM', 22, format)
worksheet.set_column('AN:AO', 29, format)
worksheet.set_column('AQ:AU', 27, format)
worksheet.set_column('AV:BC', 33, format)
worksheet.set_column('BD:BD', 30, format)
worksheet.set_column('BE:BH', 27, format)
worksheet.set_column('AY:AY', 40, format)
worksheet.set_column('BA:BA', 35, format)
worksheet.set_column('M:M', 10, format)
worksheet.set_column('T:T', 10, format)
worksheet.set_column('AD:AD', 10, format)
worksheet.set_column('AP:AP', 10, format)
worksheet.set_column('BK:BK', 10, format)
worksheet.set_column('AA:AA', 10, format)
worksheet.set_column('AT:AT', 30, format)
worksheet.set_column('BA:BA', 25, format)
format4 = workbook.add_format({'bg_color': 'yellow'})
format5 = workbook.add_format({'text_wrap': True})
worksheet.set_row(0, 28, format5)
# Highlight the summary rows just below the data (rows no_of_rows+2 / +3);
# the <= 10000000 criterion effectively matches any ordinary value.
worksheet.conditional_format(f'A{no_of_rows + 2}:AP{no_of_rows + 2}',
                             {'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'AV{no_of_rows + 2}:AZ{no_of_rows + 2}',
                             {'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BB{no_of_rows + 2}:BD{no_of_rows + 2}',
                             {'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
# worksheet.conditional_format(f'BF{no_of_rows + 2}:BF{no_of_rows + 2}',
#                              {'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'AQ{no_of_rows + 3}:AU{no_of_rows + 3}',
                             {'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BA{no_of_rows + 3}:BA{no_of_rows + 3}',
                             {'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BE{no_of_rows + 3}:BG{no_of_rows + 3}',
                             {'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
# NOTE(review): ExcelWriter.save() was removed in modern pandas — use close() when upgrading.
writer.save()
if not master_output_file.endswith(".xlsx"):
    master_output_file = master_output_file + ".xlsx"
logger.info("XLSX is getting stitched")
[KAIROS_DB]
uri = $KAIROS_URI
KAIROS_URI= https://iLens:iLensJUB$456@jub-kairos.ilens.io/kairos
# Application configuration loader: reads conf/application.conf with
# environment-variable interpolation and exposes settings via the DB class.
if __name__ == "__main__":
    # Local runs pull environment variables (e.g. KAIROS_URI) from config.env.
    from dotenv import load_dotenv
    load_dotenv(dotenv_path='config.env')
import datetime
import os
import os.path
import sys
from configparser import ConfigParser, BasicInterpolation
import yaml
from loguru import logger

# Configuring file constants
data_conf = "./conf/data.yml"
engine_conf = "./conf/engine.yml"
class EnvInterpolation(BasicInterpolation):
    """Interpolation which expands environment variables in values.

    After the standard BasicInterpolation pass, `$VAR` / `${VAR}` references
    are expanded from the environment.  A value that still begins with `$`
    afterwards (i.e. the variable is unset) is reported as None.
    """

    def before_get(self, parser, section, option, value, defaults):
        # Let the base class perform its normal '%' interpolation first.
        base_value = super().before_get(parser, section, option, value, defaults)
        expanded = os.path.expandvars(base_value)
        # An unresolved reference is left untouched by expandvars and therefore
        # still starts with '$' — treat that as "no value".
        if expanded.startswith("$"):
            return None
        return expanded
# Load the application configuration; option values may reference environment
# variables, which EnvInterpolation expands at read time.
try:
    config = ConfigParser(interpolation=EnvInterpolation())
    config.read(f"conf/application.conf")
except Exception as e:
    # Configuration is mandatory — abort the process if it cannot be loaded.
    print(f"Error while loading the config: {e}")
    print("Failed to Load Configuration. Exiting!!!")
    sys.exit()


class DB:
    """Namespaced access to database connection settings."""

    class KairosDb:
        # KairosDB base URI from the [KAIROS_DB] section (env-expanded;
        # None if the referenced environment variable is unset).
        uri = config["KAIROS_DB"]["uri"]
{
"_id" : "62a7225ab76af54830e2074d",
"id" : "yield_report_3cp",
"name" : "Yield Report 3cp",
"template_type" : "yield_report_3cp",
"project_id" : "project_107",
"associate_hierarchy" : false,
"deleteList" : [],
"description" : "Yield Report 3cp",
"sequence" : [],
"siteHierarchy" : null,
"siteHierarchyLevel" : "",
"template_conf" : {},
"isPublished" : true,
"uploadType" : "excel",
"meta" : {
"created_on" : 1638421200000,
"created_by" : "user_335"
},
"info" : {
"logbooks" : [
"logbook_152"
],
"download_formats" : [
"excel"
],
"download_properties" : [
{
"key" : "dateRange",
"label" : "Date Range",
"start_date" : null,
"end_date" : null,
"source" : "ebpr",
"logbook_id" : "logbook_152"
}
]
}
}
\ No newline at end of file
from loguru import logger
class TagsDict:
    """Combines the per-source tag dictionaries into merged lookup maps."""

    def __init__(self):
        pass

    def Merge(self, dict1, dict2):
        # dict2 wins on duplicate keys (same semantics as {**dict1, **dict2}).
        merged = dict(dict1)
        merged.update(dict2)
        return merged

    def all_tags(self, tags_cal, tags_cal_prev, tags_manual, tags_dcs):
        """Return (manual+dcs, calculated+calculated-previous) merged maps."""
        try:
            manual_dcs_tags = self.Merge(dict1=tags_manual, dict2=tags_dcs)
            cal_calprev_tags = self.Merge(dict1=tags_cal, dict2=tags_cal_prev)
            logger.info(f"No of combined Manual & DCS tags = {len(manual_dcs_tags)} \
No of Manual tags only = {len(tags_manual)} \
No of DCS tags only = {len(tags_dcs)}")
            logger.info(f"No of combined Cal & Cal prev tags = {len(cal_calprev_tags)} \
No of Cal tags only = {len(tags_cal)} \
No of Cal prev tags only = {len(tags_cal_prev)}")
            return manual_dcs_tags, cal_calprev_tags
        except Exception as e:
            # Mirrors the module's logging convention; returns None on failure.
            logger.exception("Exception occurred", exc_info=True)
class AllTags:
    """Splits the full tag-hierarchy dictionary into per-source tag maps."""

    def __init__(self):
        pass

    @staticmethod
    def _filter_by_source(all_tags_dictionary, source):
        # Keep only the tags whose 'source' matches, retaining the
        # column_tag/source pair for each.  (Replaces four copy-pasted
        # comprehensions that re-indexed all_tags_dictionary[tag] instead
        # of using the already-bound value.)
        return {
            tag: {"column_tag": meta["column_tag"], "source": meta["source"]}
            for tag, meta in all_tags_dictionary.items()
            if meta["source"] == source
        }

    def get_tags(self, all_tags_dictionary):
        """Split tags by source and return four {tag: column_tag} maps.

        :param all_tags_dictionary: {tag_name: {'column_tag': ..., 'source': ...}}
        :return: (tags_cal, tags_cal_prev, tags_manual, tags_dcs) — see
            tags_category; None if an exception was logged.
        """
        try:
            cal_tags = self._filter_by_source(all_tags_dictionary, 'calculated')
            cal_prev_tags = self._filter_by_source(
                all_tags_dictionary, 'calculated_depends_previous_value')
            manual_tags = self._filter_by_source(all_tags_dictionary, 'manual')
            dcs_tags = self._filter_by_source(all_tags_dictionary, 'dcs')
            return self.tags_category(cal_tags, cal_prev_tags, manual_tags, dcs_tags)
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)

    def tags_category(self, cal_tags, cal_prev_tags, manual_tags, dcs_tags):
        """Flatten each per-source map down to {tag_name: column_tag}."""
        try:
            tags_cal = {tag: meta["column_tag"] for tag, meta in cal_tags.items()}
            tags_cal_prev = {tag: meta["column_tag"] for tag, meta in cal_prev_tags.items()}
            tags_manual = {tag: meta["column_tag"] for tag, meta in manual_tags.items()}
            tags_dcs = {tag: meta["column_tag"] for tag, meta in dcs_tags.items()}
            return tags_cal, tags_cal_prev, tags_manual, tags_dcs
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
import pandas as pd
from datetime import datetime, timedelta
from loguru import logger
from scripts.utils.yield_sheet_3cp_utils.data_puller_3cp import Kairos_query
from scripts.utils.yield_sheet_3cp_utils.data_calculator import CalculationCalculatedColumns
class CalculatedDataframe:
    """Builds the dataframe of 'calculated' tags for the report window."""

    def __init__(self, date_output_filtered, all_calculated_tags, all_cal_tags_dict, df_manual_dcs):
        self.date_output_filtered = date_output_filtered
        self.all_calculated_tags = all_calculated_tags
        self.all_cal_tags_dict = all_cal_tags_dict
        self.df_manual_dcs = df_manual_dcs

    def unavailable_cal_tags(self, date_output_calculated):
        """Return, per date, the calculated tag names missing from the pulled data."""
        try:
            missing_by_date = {}
            expected_names = list(self.all_cal_tags_dict.keys())
            for day, pulled in date_output_calculated.items():
                # A short result means Kairos returned fewer tags than expected.
                if len(pulled) != len(self.all_calculated_tags):
                    present = list(pulled.keys())
                    missing_by_date[day] = [name for name in expected_names
                                            if name not in present]
            return missing_by_date
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)

    def calculated_df(self):
        """Pull the first day's calculated-tag values and derive the full frame.

        Returns (df_calculated_ebpr, df_calculated, unavailable_tags_cal).
        """
        try:
            # Only the first day is queried; the window collapses to one instant.
            first_day = list(self.date_output_filtered.keys())[0]
            start_ms = int(first_day.timestamp()) * 1000
            end_ms = int(first_day.timestamp()) * 1000
            query_payload = Kairos_query().kairos_query(start=start_ms,
                                                        end=end_ms,
                                                        tag=self.all_calculated_tags)
            # Invert {tag_name: column_tag} so results map back to tag names.
            inverted_lookup = {column: name for name, column in self.all_cal_tags_dict.items()}
            first_day_data = Kairos_query().get_data_from_kairos(query=query_payload,
                                                                 tags_dict=inverted_lookup,
                                                                 date=first_day)
            date_output_calculated = {first_day: first_day_data}
            unavailable_tags_cal = self.unavailable_cal_tags(date_output_calculated)
            logger.info(f"unavailable tags calculated = {unavailable_tags_cal}")
            column_names = list(list(date_output_calculated.values())[0].keys())
            flat_values = [value
                           for day_data in date_output_calculated.values()
                           for value in day_data.values()]
            # Frame is pre-sized to match the manual/DCS frame; only row 0
            # (the baseline day) is seeded here.
            df_calculated = pd.DataFrame(index=list(range(self.df_manual_dcs.shape[0])),
                                         columns=column_names)
            df_calculated.iloc[0, :] = flat_values
            logger.debug(f"Dataframe shape of Calculated tags = {df_calculated.shape}")
            logger.debug(f"{df_calculated}")
            logger.info(f"Calculating calculated column values started")
            df_calculated = CalculationCalculatedColumns().calculations(df_manual_dcs=self.df_manual_dcs,
                                                                        df_calculated=df_calculated)
            logger.info(f"The shape of dataframe = {df_calculated.shape}")
            for col in df_calculated.columns:
                df_calculated[col] = df_calculated[col].astype(float).round(4)
            df_calculated_ebpr = df_calculated.copy()
            return df_calculated_ebpr, df_calculated, unavailable_tags_cal
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
\ No newline at end of file
import pandas as pd
from loguru import logger
class ConcatenateManualCalculatedDf:
    """Joins the manual/DCS dataframe with the calculated-columns dataframe."""

    def __init__(self, df_manual_dcs_ebpr, df_calculated_ebpr, start_date):
        self.df_manual_dcs_ebpr = df_manual_dcs_ebpr
        self.df_calculated_ebpr = df_calculated_ebpr
        self.start_date = start_date

    def concate_manual_calculated_df(self):
        """Return the combined EBPR frame with 'Date' formatted as YYYY-MM-DD."""
        try:
            # The totaliser column is only an intermediate input; drop it
            # before the join.  NOTE: inplace drop mutates the caller's frame.
            self.df_manual_dcs_ebpr.drop(['Beta_Purification_Column_C_2409_Outlet_Flow_TZ'],
                                         axis=1, inplace=True)
            combined = pd.concat([self.df_manual_dcs_ebpr, self.df_calculated_ebpr], axis=1)
            # When the first row is the baseline (start) day, keep later days only.
            if combined['Date'][0] == self.start_date:
                combined = combined[combined['Date'] > self.start_date]
            combined['Date'] = combined['Date'].dt.strftime('%Y-%m-%d')
            return combined
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
\ No newline at end of file
import pandas as pd
from loguru import logger
enable_traceback = True
class CalculationCalculatedColumns:
    """Derives the 'calculated' DPR columns row by row.

    Row 0 of both frames is the baseline day; every later row is computed
    from that day's manual/DCS readings plus the previous row's calculated
    values (opening = previous closing, running totals, per-unit norms).
    """

    def __init__(self):
        pass

    def summation(self, tag_list):
        # Sum of a list of operands.
        return sum(tag_list)

    def subtract(self, first_ele, second_ele):
        # first_ele - second_ele
        return (first_ele - second_ele)

    def division(self, first_ele, second_ele):
        # first_ele / second_ele; no zero-guard — a zero denominator raises
        # and is caught/logged by the caller's except block.
        return (first_ele / second_ele)

    def calculations(self, df_manual_dcs, df_calculated):
        """Populate rows 1..N-1 of df_calculated and return it.

        :param df_manual_dcs: manual + DCS tag values, one row per day
        :param df_calculated: pre-sized frame whose row 0 already carries the
            opening/baseline values that the recurrences below start from
        :return: df_calculated with the calculated columns filled, or None
            if an exception was logged
        """
        logger.info(f"Calculated data for 3CP")
        try:
            for rows in range(1, df_manual_dcs.shape[0]):
                # ---- D1D001 (beta) consumption ----
                # Opening stock = previous day's closing stock.
                df_calculated.loc[df_calculated.index[rows], "D1D001_consumptions_Opening_DPR"] = \
                    df_calculated.loc[df_calculated.index[rows - 1], "D1D001_consumptions_Closing_DPR"]
                # Running receipt total = previous total + today's receipt.
                df_calculated.loc[df_calculated.index[rows], "D1D001_consumptions_Total_Receipt_DPR"] = \
                    self.summation([df_calculated.loc[df_calculated.index[rows - 1], "D1D001_consumptions_Total_Receipt_DPR"],
                                    df_manual_dcs.loc[df_calculated.index[rows], 'D1D001_consumptions_Day_Receipt_DPR']])
                # Closing stock from T-2703/T-2101 tank readings; 331.8, 459,
                # 101.8 are presumably per-tank volume factors and 0.95 a
                # correction factor — TODO confirm against plant documentation.
                df_calculated.loc[df_calculated.index[rows], "D1D001_consumptions_Closing_DPR"] = \
                    (self.summation(
                        [((self.summation([df_manual_dcs.loc[df_calculated.index[rows], 'D1D001Readings_T_2703_A_DPR'],
                                           df_manual_dcs.loc[df_calculated.index[rows], 'D1D001Readings_T_2703_B_DPR']]) + 30) * 331.8 * 0.95),
                         (self.summation([df_manual_dcs.loc[df_calculated.index[rows], 'D1D001Readings_T_2101_A_DPR'],
                                          df_manual_dcs.loc[df_calculated.index[rows], 'D1D001Readings_T_2101_B_DPR']])) * 459]) +
                     (30 * 101.8 * 0.95)) / 1000
                # Day recovery = delta of the C-2409 outlet flow totaliser.
                df_calculated.loc[df_calculated.index[rows], "D1D001_consumptions_Day_Recovery_DPR"] = \
                    self.subtract(df_manual_dcs.loc[df_calculated.index[rows],
                                                    'Beta_Purification_Column_C_2409_Outlet_Flow_TZ'],
                                  df_manual_dcs.loc[df_calculated.index[rows - 1],
                                                    'Beta_Purification_Column_C_2409_Outlet_Flow_TZ'],
                                  )
                # Day consumption = opening + receipt + recovery - closing.
                df_calculated.loc[df_calculated.index[rows], "D1D001_consumptions_Day_Cons_DPR"] = \
                    self.subtract(self.summation([df_calculated.loc[df_calculated.index[rows], "D1D001_consumptions_Opening_DPR"],
                                                  df_manual_dcs.loc[df_calculated.index[rows], 'D1D001_consumptions_Day_Receipt_DPR'],
                                                  df_calculated.loc[df_calculated.index[rows], 'D1D001_consumptions_Day_Recovery_DPR']]),
                                  df_calculated.loc[df_calculated.index[rows], "D1D001_consumptions_Closing_DPR"])
                # ---- 7302011030 consumption (ammonia, per the norms column below) ----
                # NOTE: opening is read from the previous day's *manual* closing
                # entry (unlike D1D001, which uses the calculated closing).
                df_calculated.loc[df_calculated.index[rows], "7302011030_Consumptions_Opening_DPR"] = \
                    df_manual_dcs.loc[df_calculated.index[rows - 1], '7302011030_Consumptions_Closing_DPR']
                df_calculated.loc[df_calculated.index[rows], "7302011030_Consumptions_Day_Cons_DPR"] = \
                    self.subtract(
                        self.summation([df_calculated.loc[df_calculated.index[rows], "7302011030_Consumptions_Opening_DPR"],
                                        df_manual_dcs.loc[df_calculated.index[rows], '7302011030_Consumptions_Day_Receipt_DPR']]),
                        df_manual_dcs.loc[df_calculated.index[rows], '7302011030_Consumptions_Closing_DPR'])
                df_calculated.loc[df_calculated.index[rows], "7302011030_Consumptions_Total_Cons_DPR"] = \
                    self.summation([df_calculated.loc[df_calculated.index[rows - 1], "7302011030_Consumptions_Total_Cons_DPR"],
                                    df_calculated.loc[df_calculated.index[rows], '7302011030_Consumptions_Day_Cons_DPR']])
                df_calculated.loc[df_calculated.index[rows], "7302011030_Consumptions_Total_Receipt_DPR"] = \
                    self.summation([df_calculated.loc[df_calculated.index[rows - 1], "7302011030_Consumptions_Total_Receipt_DPR"],
                                    df_manual_dcs.loc[df_calculated.index[rows], '7302011030_Consumptions_Day_Receipt_DPR']])
                # ---- 7302011061 consumption (benzene, per the norms column below) ----
                df_calculated.loc[df_calculated.index[rows], "7302011061_Consumption_Opening_DPR"] = \
                    df_calculated.loc[df_calculated.index[rows - 1], "7302011061_Consumption_Closing_DPR"]
                # Closing = opening + receipt - day consumption.
                df_calculated.loc[df_calculated.index[rows], "7302011061_Consumption_Closing_DPR"] = \
                    self.subtract(
                        self.summation([df_calculated.loc[df_calculated.index[rows], "7302011061_Consumption_Opening_DPR"],
                                        df_manual_dcs.loc[df_calculated.index[rows], '7302011061_Consumption_Day_Receipt_DPR']]),
                        df_manual_dcs.loc[df_calculated.index[rows], '7302011061_Consumption_Day_Cons_DPR'])
                df_calculated.loc[df_calculated.index[rows], "7302011061_Consumption_Total_Cons_DPR"] = \
                    self.summation([df_calculated.loc[df_calculated.index[rows - 1], "7302011061_Consumption_Total_Cons_DPR"],
                                    df_manual_dcs.loc[df_calculated.index[rows], '7302011061_Consumption_Day_Cons_DPR']])
                df_calculated.loc[df_calculated.index[rows], "7302011061_Consumption_Total_Receipt_DPR"] = \
                    self.summation([df_calculated.loc[df_calculated.index[rows - 1], "7302011061_Consumption_Total_Receipt_DPR"],
                                    df_manual_dcs.loc[df_calculated.index[rows], '7302011061_Consumption_Day_Receipt_DPR']])
                # ---- Crude production ----
                # Day production = beta day consumption x conversion factor.
                df_calculated.loc[df_calculated.index[rows], "Crude_Prod_Day_Prod_DPR"] = \
                    df_calculated.loc[df_calculated.index[rows], "D1D001_consumptions_Day_Cons_DPR"] * \
                    df_manual_dcs.loc[df_calculated.index[rows], 'D1D001Readings_Conv_DPR']
                df_calculated.loc[df_calculated.index[rows], "Crude_Prod_Total_Prod_DPR"] = \
                    self.summation([df_calculated.loc[df_calculated.index[rows - 1], "Crude_Prod_Total_Prod_DPR"],
                                    df_calculated.loc[df_calculated.index[rows], 'Crude_Prod_Day_Prod_DPR']])
                # ---- Pure production ----
                df_calculated.loc[df_calculated.index[rows], "Pure_Production_Opening_DPR"] = \
                    df_calculated.loc[df_calculated.index[rows - 1], "Pure_Production_Closing_of_Pure_Tanks_only_DPR"]
                # Closing = (LT-2701 A + B levels) * 1.08 + dead volumes; 1.08
                # is presumably a level-to-quantity factor — TODO confirm.
                df_calculated.loc[df_calculated.index[rows], "Pure_Production_Closing_of_Pure_Tanks_only_DPR"] = \
                    self.summation(
                        [((self.summation([df_manual_dcs.loc[df_calculated.index[rows], 'Pure_Production_LT_2701_A_DPR'],
                                           df_manual_dcs.loc[df_calculated.index[rows], 'Pure_Production_LT_2701_B_DPR']])) * 1.08),
                         df_manual_dcs.loc[df_calculated.index[rows], 'Pure_Production_Pure_tank_Dead_Volumes_DPR']])
                df_calculated.loc[df_calculated.index[rows], "Pure_Production_Total_Nia_DPR"] = \
                    self.summation([df_calculated.loc[df_calculated.index[rows - 1], "Pure_Production_Total_Nia_DPR"],
                                    df_manual_dcs.loc[df_calculated.index[rows], 'Pure_Production_Day_Nia_DPR']])
                df_calculated.loc[df_calculated.index[rows], "Pure_Production_Total_Drum_Filling_DPR"] = \
                    self.summation([df_calculated.loc[df_calculated.index[rows - 1], "Pure_Production_Total_Drum_Filling_DPR"],
                                    df_manual_dcs.loc[df_calculated.index[rows], 'Pure_Production_Day_Drum_Filling_DPR']])
                # Day production = closing + nia + drum filling - opening.
                df_calculated.loc[df_calculated.index[rows], "Pure_Production_Day_Prod_DPR"] = \
                    self.subtract(self.summation(
                        [df_calculated.loc[df_calculated.index[rows], "Pure_Production_Closing_of_Pure_Tanks_only_DPR"],
                         df_manual_dcs.loc[df_calculated.index[rows], 'Pure_Production_Day_Nia_DPR'],
                         df_manual_dcs.loc[df_calculated.index[rows], 'Pure_Production_Day_Drum_Filling_DPR']]),
                        df_calculated.loc[df_calculated.index[rows], "Pure_Production_Opening_DPR"])
                df_calculated.loc[df_calculated.index[rows], "Pure_Production_Total_Prod_DPR"] = \
                    self.summation([df_calculated.loc[df_calculated.index[rows - 1], "Pure_Production_Total_Prod_DPR"],
                                    df_calculated.loc[df_calculated.index[rows], 'Pure_Production_Day_Prod_DPR']])
                # ---- Utility report norms (each divided by actual day production) ----
                df_calculated.loc[df_calculated.index[rows], "Utility_report_Actual_Day_Prod"] = \
                    df_calculated.loc[df_calculated.index[rows], "Crude_Prod_Day_Prod_DPR"]
                df_calculated.loc[df_calculated.index[rows], "Utility_report_Power_Norms"] = \
                    self.division(df_manual_dcs.loc[df_calculated.index[rows], 'Utility_report_Day_Power'],
                                  df_calculated.loc[df_calculated.index[rows], 'Utility_report_Actual_Day_Prod'])
                df_calculated.loc[df_calculated.index[rows], "Utility_report_Steam_Norms"] = \
                    self.division(df_manual_dcs.loc[df_calculated.index[rows], 'Utility_report_Day_Steam'],
                                  df_calculated.loc[df_calculated.index[rows], 'Utility_report_Actual_Day_Prod'])
                df_calculated.loc[df_calculated.index[rows], "Utility_report_Raffinate_Norms"] = \
                    self.division(df_manual_dcs.loc[df_calculated.index[rows], 'Utility_report_Day_Raffinate'],
                                  df_calculated.loc[df_calculated.index[rows], 'Utility_report_Actual_Day_Prod'])
                df_calculated.loc[df_calculated.index[rows], "Utility_report_Raffinate_Vent_Gas"] = \
                    self.division(df_manual_dcs.loc[df_calculated.index[rows], 'Utility_report_Vent_Gas_Raffinate'],
                                  df_calculated.loc[df_calculated.index[rows], 'Utility_report_Actual_Day_Prod'])
                df_calculated.loc[df_calculated.index[rows], "Utility_report_Raw_Water_Norms"] = \
                    self.division(df_manual_dcs.loc[df_calculated.index[rows], 'Utility_report_Day_Treated_Water'],
                                  df_calculated.loc[df_calculated.index[rows], 'Utility_report_Actual_Day_Prod'])
                # Hourly burn rate: x1000 then /24 — presumably tonnes/day to
                # kg/hour; TODO confirm units.
                df_calculated.loc[df_calculated.index[rows], "Utility_report_per_hr_burn_rate"] = \
                    df_manual_dcs.loc[df_calculated.index[rows], 'Utility_report_Raffinate_Incinerated'] * 1000 / 24
                df_calculated.loc[df_calculated.index[rows], "Utility_report_Actual_Ammonia_Norms"] = \
                    self.division(df_calculated.loc[df_calculated.index[rows], "7302011030_Consumptions_Day_Cons_DPR"],
                                  df_calculated.loc[df_calculated.index[rows], "Utility_report_Actual_Day_Prod"])
                # Beta norms exclude the recovered quantity.
                df_calculated.loc[df_calculated.index[rows], "Utility_report_Actual_Beta_Norms"] = \
                    self.division(self.subtract(df_calculated.loc[df_calculated.index[rows], "D1D001_consumptions_Day_Cons_DPR"],
                                                df_calculated.loc[df_calculated.index[rows], "D1D001_consumptions_Day_Recovery_DPR"]),
                                  df_calculated.loc[df_calculated.index[rows], "Utility_report_Actual_Day_Prod"])
                df_calculated.loc[df_calculated.index[rows], "Utility_report_Actual_Benzene_Norms"] = \
                    self.division(df_manual_dcs.loc[df_calculated.index[rows], '7302011061_Consumption_Day_Cons_DPR'],
                                  df_calculated.loc[df_calculated.index[rows], "Utility_report_Actual_Day_Prod"])
                df_calculated.loc[df_calculated.index[rows], "Utility_report_Day_DM_norm"] = \
                    self.division(df_manual_dcs.loc[df_calculated.index[rows], 'Utility_report_Day_DM'],
                                  df_calculated.loc[df_calculated.index[rows], "Utility_report_Actual_Day_Prod"])
            return df_calculated
        except Exception as e:
            logger.exception("Exception occurred", exc_info=True)
import json
import pandas as pd
import requests
from loguru import logger
from scripts.constants.app_configuration import DB
class Kairos_query:
    """Builds KairosDB datapoint queries and pulls last-sample values per tag."""

    def __init__(self):
        # Kairos endpoint comes from the application configuration.
        self.kairos_host = DB.KairosDb.uri
        self.kairos_url = "{kairos_host}/api/v1/datapoints/query".format(
            kairos_host=self.kairos_host)

    def kairos_query(self, start, end, tag):
        """Return the Kairos query payload for `tag` between two epoch-ms bounds.

        :param start: window start, epoch milliseconds (absolute)
        :param end: window end, epoch milliseconds (absolute)
        :param tag: tag id (or list of ids) for the `c3` tag filter
        """
        try:
            return {
                "metrics": [
                    {
                        "tags": {
                            "c3": tag
                        },
                        "name": "project_227__ilens.live_data.raw",
                        "group_by": [
                            {
                                "name": "tag",
                                "tags": ["c3"]
                            }
                        ],
                        # 'last' sample within each 1-minute bucket.
                        "aggregators": [
                            {
                                "name": "last",
                                "sampling": {
                                    "value": "1",
                                    "unit": "minutes"
                                }
                            }
                        ]
                    }
                ],
                "plugins": [],
                "cache_time": 0,
                "time_zone": "Asia/Calcutta",
                "start_absolute": start,
                "end_absolute": end,
            }
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)

    def get_data_from_kairos(self, query, tags_dict, date):
        """POST `query` to Kairos and map results to {tag_name: rounded value}.

        :param query: payload built by `kairos_query`
        :param tags_dict: {column_tag_id: tag_name} lookup
        :param date: report date, used only for log context
        :return: {tag_name: value} with 0 for missing values, or None if the
            request/parse failed entirely
        """
        output = {}
        try:
            response = requests.post(self.kairos_url, data=json.dumps(query))
            # Parse the body once (previously it was parsed twice, the first
            # parse bound to an unused variable).
            payload = response.json()
            grouped_output_data = payload["queries"][0]["results"]
            for each_grouped_data in grouped_output_data:
                value = each_grouped_data["values"]
                if each_grouped_data.get('group_by') is None:
                    # Ungrouped result: there is no tag id to attribute the
                    # values to, so default every expected tag to 0 and move
                    # on.  (Previously this fell through to tags_dict[None],
                    # whose KeyError made the whole call return None.)
                    for tag_name in tags_dict.values():
                        output[tag_name] = 0
                    continue
                tag_id = each_grouped_data["group_by"][0]["group"]["c3"]
                try:
                    output[tags_dict[tag_id]] = round(value[0][1], 2)
                except Exception as e:
                    # Missing/empty value list: log and fall back to 0.
                    logger.exception(f"Exception occurred for tag = {tag_id} and date = {date}", exc_info=True)
                    output[tags_dict[tag_id]] = 0
            return output
        except Exception as e:
            logger.exception(f"Exception occurred - {e} for date = {date}", exc_info=True)
import pandas as pd
from datetime import datetime, timedelta
from loguru import logger
class EbprManualDcsDataframe:
    """Builds the manual/DCS dataframe (and its EBPR copy) from pulled data."""

    def __init__(self, date_output, all_manual_dcs_tags):
        # {date: {tag_name: value}} pulled for every report day.
        self.date_output = date_output
        # Full manual+DCS tag map (kept for callers; not used in the build).
        self.all_manual_dcs_tags = all_manual_dcs_tags

    def ebpr_manual_dcs_dataframe(self):
        """Return (df_manual_dcs, df_manual_dcs_ebpr, date_output_filtered).

        df_manual_dcs has one row per date with a trailing 'Date' column;
        df_manual_dcs_ebpr is a copy with 'Date' moved to the first column.
        Returns None if an exception was logged.
        """
        try:
            date_output_filtered = self.date_output
            # Column order comes from the first day's tag dict.
            columns_tags = list(list(date_output_filtered.values())[0].keys())
            value_list = [list(dic.values()) for dic in list(date_output_filtered.values())]
            df_manual_dcs = pd.DataFrame(columns=columns_tags, data=value_list)
            # (Removed two dead locals: an unused copy of the key list and an
            # unused day-by-day date range built with timedelta.)
            df_manual_dcs["Date"] = list(date_output_filtered.keys())
            df_manual_dcs_ebpr = df_manual_dcs.copy()
            # Move 'Date' to the first position for the EBPR layout.
            first_column = df_manual_dcs_ebpr.pop('Date')
            df_manual_dcs_ebpr.insert(0, 'Date', first_column)
            logger.debug(f"Dataframe shape of Manual and dcs tags = {df_manual_dcs.shape}")
            logger.info(f"No of rows containing NaN values in manual and dcs data = "
                        f"{df_manual_dcs[df_manual_dcs.isnull().any(axis=1)].shape[0]}")
            null_col_manual_list = [col for col in df_manual_dcs.columns if df_manual_dcs[col].isnull().any()]
            logger.info(f"Columns containing NaN values in manual and dcs data = {null_col_manual_list}")
            return df_manual_dcs, df_manual_dcs_ebpr, date_output_filtered
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
tag_heirarcy:
D1D001_consumptions_Opening_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6452
source: calculated
dependency:
- D1D001_consumptions_Closing_DPR
D1D001_consumptions_Day_Cons_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6456
source: calculated
dependency:
- D1D001_consumptions_Opening_DPR
- D1D001_consumptions_Day_Receipt_DPR
- D1D001_consumptions_Day_Recovery_DPR
- D1D001_consumptions_Closing_DPR
7302011030_Consumptions_Opening_DPR:
column_tag: site_116$dept_134$line_350$equipment_4299$tag_6487
source: calculated
dependency:
- 7302011030_Consumptions_Closing_DPR
7302011030_Consumptions_Day_Cons_DPR:
column_tag: site_116$dept_134$line_350$equipment_4299$tag_6490
source: calculated
dependency:
- 7302011030_Consumptions_Opening_DPR
- 7302011030_Consumptions_Day_Receipt_DPR
- 7302011030_Consumptions_Closing_DPR
7302011030_Consumptions_Closing_DPR:
column_tag: site_116$dept_134$line_350$equipment_4299$tag_6491
source: manual
dependency:
- None
7302011061_Consumption_Opening_DPR:
column_tag: site_116$dept_134$line_350$equipment_4302$tag_6492
source: calculated
dependency:
- 7302011061_Consumption_Closing_DPR
Crude_Prod_Day_Prod_DPR:
column_tag: site_116$dept_134$line_350$equipment_4305$tag_6460
source: calculated
dependency:
- D1D001_consumptions_Day_Cons_DPR
- D1D001Readings_Conv_DPR
Pure_Production_Opening_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6497
source: calculated
dependency:
- Pure_Production_Closing_of_Pure_Tanks_only_DPR
Pure_Production_Day_Prod_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6496
source: calculated
dependency:
- Crude_Prod_Day_Prod_DPR
Utility_report_Power_Norms:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6470
source: calculated
dependency:
- Utility_report_Day_Power
- Utility_report_Actual_Day_Prod
Utility_report_Steam_Norms:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6471
source: calculated
dependency:
- Utility_report_Day_Steam
- Utility_report_Actual_Day_Prod
Utility_report_Raffinate_Norms:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6472
source: calculated
dependency:
- Utility_report_Day_Raffinate
- Utility_report_Actual_Day_Prod
Utility_report_Raffinate_Vent_Gas:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6473
source: calculated
dependency:
- Utility_report_Vent_Gas_Raffinate
- Utility_report_Actual_Day_Prod
Utility_report_Raw_Water_Norms:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6474
source: calculated
dependency:
- Utility_report_Day_Treated_Water
- Utility_report_Actual_Day_Prod
# Utility_report_Actual_Day_Prod:
# column_tag: site_116$dept_135$line_366$equipment_4307$tag_6476
# source: calculated
# dependency:
# - Crude_Prod_Day_Prod_DPR
Utility_report_per_hr_burn_rate:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6483
source: calculated
dependency:
- Utility_report_Raffinate_Incinerated
D1D001Readings_T_2703_A_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6447
source: manual
dependency: None
D1D001Readings_T_2703_B_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6448
source: manual
dependency: None
D1D001Readings_Conv_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6451
source: manual
dependency: None
D1D001_consumptions_Day_Receipt_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6453
source: manual
dependency: None
D1D001_consumptions_Day_Recovery_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6454
source: calculated_depends_previous_value
dependency: None
7302011030_Consumptions_Day_Receipt_DPR:
column_tag: site_116$dept_134$line_350$equipment_4299$tag_6488
source: manual
dependency: None
7302011061_Consumption_Day_Receipt_DPR:
column_tag: site_116$dept_134$line_350$equipment_4302$tag_6493
source: manual
dependency: None
7302011061_Consumption_Day_Cons_DPR:
column_tag: site_116$dept_134$line_350$equipment_4302$tag_6459
source: manual
dependency: None
## Local_Drums_consumption_Day_Receipt_DPR:
## column_tag: site_116$dept_134$line_350$equipment_4303$tag_6504
## source: manual
## dependency: None
##
## DPR_Local_Drums_consumption_Day_Cons:
# column_tag: site_116$dept_134$line_350$equipment_4303$tag_6511
# source: manual
# dependency: None
# DPR_Export_Drum_Consumption_Day_Receipt:
# column_tag: site_116$dept_134$line_350$equipment_4304$tag_6507
# source: manual
# dependency:
#
# DPR_Export_Drum_Consumption_Day_Cons:
# column_tag: site_116$dept_134$line_350$equipment_4304$tag_6503
# source: manual
# dependency: None
Pure_Production_Day_Nia_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6462
source: manual
dependency: None
Pure_Production_Day_Drum_Filling_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6464
source: manual
dependency: None
Pure_Production_Pure_tank_Dead_Volumes_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6469
source: manual
dependency: None
Utility_report_Day_Power:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6475
source: manual
dependency: None
Utility_report_Day_Steam:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6477
source: manual
dependency: None
Utility_report_Day_Raffinate:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6478
source: manual
dependency: None
Utility_report_Vent_Gas_Raffinate:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6479
source: manual
dependency: None
Utility_report_Day_DM:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6480
source: manual
dependency: None
Utility_report_Day_Treated_Water:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6481
source: manual
dependency: None
Utility_report_Raffinate_Incinerated:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6482
source: manual
dependency: None
# Utility_report_Compressor:
# column_tag: site_116$dept_135$line_366$equipment_4307$tag_6484
# source: manual
# dependency: None
D1D001Readings_T_2101_A_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6449
source: dcs
dependency: None
D1D001Readings_T_2101_B_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6450
source: dcs
dependency: None
Pure_Production_LT_2701_A_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6466
source: dcs
dependency: None
Pure_Production_LT_2701_B_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6467
source: dcs
dependency: None
D1D001_consumptions_Closing_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6457
source: calculated
dependency:
- D1D001Readings_T_2703_A_DPR
- D1D001Readings_T_2703_B_DPR
- D1D001Readings_T_2101_A_DPR
- D1D001Readings_T_2101_B_DPR
D1D001_consumptions_Total_Receipt_DPR:
column_tag: site_116$dept_134$line_350$equipment_4298$tag_6455
source: calculated_depends_previous_value
dependency:
- D1D001_consumptions_Total_Receipt_DPR
- D1D001_consumptions_Day_Receipt_DPR
7302011030_Consumptions_Total_Receipt_DPR:
column_tag: site_116$dept_134$line_350$equipment_4299$tag_6489
source: calculated_depends_previous_value
dependency:
- 7302011030_Consumptions_Total_Receipt_DPR
- 7302011030_Consumptions_Day_Receipt_DPR
7302011030_Consumptions_Total_Cons_DPR:
column_tag: site_116$dept_134$line_350$equipment_4299$tag_6458
source: calculated_depends_previous_value
dependency:
- 7302011030_Consumptions_Total_Cons_DPR
- 7302011030_Consumptions_Day_Cons_DPR
7302011061_Consumption_Closing_DPR:
column_tag: site_116$dept_134$line_350$equipment_4302$tag_6499
source: calculated_depends_previous_value
dependency:
- 7302011061_Consumption_Opening_DPR
- 7302011061_Consumption_Day_Receipt_DPR
- 7302011061_Consumption_Day_Cons_DPR
7302011061_Consumption_Total_Receipt_DPR:
column_tag: site_116$dept_134$line_350$equipment_4302$tag_6495
source: calculated_depends_previous_value
dependency:
- 7302011061_Consumption_Total_Receipt_DPR
    - 7302011061_Consumption_Day_Receipt_DPR  # copy-paste fix: was 7302011030_Consumptions_Day_Receipt_DPR; every other Total_Receipt accumulates its own Day_Receipt
7302011061_Consumption_Total_Cons_DPR:
column_tag: site_116$dept_134$line_350$equipment_4302$tag_6498
source: calculated_depends_previous_value
dependency:
- 7302011061_Consumption_Total_Cons_DPR
- 7302011061_Consumption_Day_Cons_DPR
# DPR_Local_Drums_consumption_Closing:
# column_tag: site_116$dept_134$line_350$equipment_4303$tag_6509
# source: calculated_depends_previous_value
# dependency:
# - DPR_Local_Drums_consumption_Opening
# - DPR_Local_Drums_consumption_Day_Receipt
# - DPR_Local_Drums_consumption_Day_Cons
#
# DPR_Local_Drums_consumption_Total_Receipt:
# column_tag: site_116$dept_134$line_350$equipment_4303$tag_6506
# source: calculated_depends_previous_value
# dependency:
# - DPR_Local_Drums_consumption_Total_Receipt
# - DPR_Local_Drums_consumption_Day_Receipt
#
# DPR_Local_Drums_consumption_Total_Cons:
# column_tag: site_116$dept_134$line_350$equipment_4303$tag_6510
# source: calculated_depends_previous_value
# dependency:
# - DPR_Local_Drums_consumption_Total_Cons
# - DPR_Local_Drums_consumption_Day_Cons
# DPR_Export_Drum_Consumption_Closing:
# column_tag: site_116$dept_134$line_350$equipment_4304$tag_6501
# source: calculated_depends_previous_value
# dependency:
# - DPR_Export_Drum_Consumption_Opening
# - DPR_Export_Drum_Consumption_Day_Receipt
# - DPR_Export_Drum_Consumption_Day_Cons
#
# DPR_Export_Drum_Consumption_Total_Receipt:
# column_tag: site_116$dept_134$line_350$equipment_4304$tag_6505
# source: calculated_depends_previous_value
# dependency:
# - DPR_Export_Drum_Consumption_Total_Receipt
# - DPR_Export_Drum_Consumption_Day_Receipt
#
# DPR_Export_Drum_Consumption_Total_Cons:
# column_tag: site_116$dept_134$line_350$equipment_4304$tag_6502
# source: calculated_depends_previous_value
# dependency:
# - DPR_Export_Drum_Consumption_Total_Cons
# - DPR_Export_Drum_Consumption_Day_Cons
Crude_Prod_Total_Prod_DPR:
column_tag: site_116$dept_134$line_350$equipment_4305$tag_6461
source: calculated_depends_previous_value
dependency:
- Crude_Prod_Total_Prod_DPR
- Crude_Prod_Day_Prod_DPR
Pure_Production_Closing_of_Pure_Tanks_only_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6468
source: calculated_depends_previous_value
dependency:
- Pure_Production_LT_2701_A_DPR
- Pure_Production_LT_2701_B_DPR
- Pure_Production_Pure_tank_Dead_Volumes_DPR
Pure_Production_Total_Prod_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6494
source: calculated_depends_previous_value
dependency:
- Pure_Production_Total_Prod_DPR
- Pure_Production_Day_Prod_DPR
Pure_Production_Total_Nia_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6463
source: calculated_depends_previous_value
dependency:
- Pure_Production_Total_Nia_DPR
- Pure_Production_Day_Nia_DPR
Pure_Production_Total_Drum_Filling_DPR:
column_tag: site_116$dept_134$line_350$equipment_4306$tag_6465
source: calculated_depends_previous_value
dependency:
- Pure_Production_Total_Drum_Filling_DPR
- Pure_Production_Day_Drum_Filling_DPR
Utility_report_Actual_Ammonia_Norms:
column_tag: site_116$dept_134$line_350$equipment_4179$tag_6486
source: calculated
dependency:
- 7302011030_Consumptions_Day_Cons_DPR
- Utility_report_Actual_Day_Prod
Utility_report_Actual_Beta_Norms:
column_tag: site_116$dept_134$line_350$equipment_4179$tag_6724
source: calculated
dependency:
- D1D001_consumptions_Day_Cons_DPR
- D1D001_consumptions_Day_Recovery_DPR
- Utility_report_Actual_Day_Prod
Utility_report_Actual_Benzene_Norms:
column_tag: site_116$dept_134$line_350$equipment_4179$tag_6725
source: calculated
dependency:
- 7302011061_Consumption_Day_Cons_DPR
- Utility_report_Actual_Day_Prod
Utility_report_Day_DM_norm:
column_tag: site_116$dept_135$line_366$equipment_4307$tag_6915
source: calculated
dependency: Utility_report_Day_DM
# Utility_report_Treated_Water_Norm:
# column_tag: site_116$dept_135$line_366$equipment_4307$tag_6914
# source: calculated
# dependency: Utility_report_Day_Treated_Water
# Utility_report_Actual_Air_Norms:
# column_tag: site_116$dept_134$line_350$equipment_4179$tag_6726
# source: calculated
# dependency:
# - Utility_report_Day_Air
# - Utility_report_Actual_Day_Prod
# Utility_report_Day_Air:
# column_tag: site_116$dept_134$line_350$equipment_4179$tag_6731
# source: manual
# dependency:
# - None
Beta_Purification_Column_C_2409_Outlet_Flow_TZ:
column_tag: site_116$dept_134$line_351$equipment_4209$tag_5372
source: manual
dependency: None
\ No newline at end of file
from scripts.utils.yield_sheet_3cp_utils.all_tags_3cp import TagsDict
import pandas as pd
from datetime import datetime, timedelta
from loguru import logger
from scripts.utils.yield_sheet_3cp_utils.data_puller_3cp import Kairos_query
class ManualDcsData:
    """Pull manual- and DCS-sourced tag data from Kairos, one query per day,
    for every day in the inclusive window [start_date, end_date].
    """

    def __init__(self, start_date, end_date, tags_cal, tags_cal_prev, tags_manual, tags_dcs):
        # Date window (inclusive) plus the four tag dictionaries produced by
        # AllTags().get_tags(); only the manual/DCS tags are queried here, the
        # calculated ones are resolved later from this data.
        self.start_date = start_date
        self.end_date = end_date
        self.tags_cal = tags_cal
        self.tags_cal_prev = tags_cal_prev
        self.tags_manual = tags_manual
        self.tags_dcs = tags_dcs

    def manual_dcs_dataframe(self):
        """Return ``{date: pulled-data}`` for every day in the window.

        A day whose Kairos pull raises is recorded as ``None`` (instead of
        being silently omitted) so that the downstream availability check,
        which tests ``v is None``, can report the gap.

        Returns None itself only if the initial tag-dictionary setup fails.
        """
        try:
            all_manual_dcs_tags_dict, all_cal_tags_dict = TagsDict().all_tags(self.tags_cal, self.tags_cal_prev,
                                                                              self.tags_manual, self.tags_dcs)
            all_manual_dcs_tags = list(all_manual_dcs_tags_dict.values())
            # All dates for which we are going to do calculations (inclusive range)
            all_dates = [self.start_date + timedelta(days=x)
                         for x in range((self.end_date - self.start_date).days + 1)]
            logger.debug(f"Data required for dates : {all_dates}")
            # Reverse mapping (tag id -> tag name), built once outside the loop.
            tags_by_id = {v: k for k, v in all_manual_dcs_tags_dict.items()}
            date_output = {}
            for current_date in all_dates:
                try:
                    logger.info(f"Pulling Manual and DCS data for date - {current_date}")
                    # NOTE(review): start and end are the same instant, exactly as
                    # in the original query — presumably a point-in-time read of
                    # the day's 05:00 snapshot; confirm against Kairos usage.
                    epoch_ms = int(current_date.timestamp()) * 1000
                    query_manual_dcs = Kairos_query().kairos_query(start=epoch_ms, end=epoch_ms,
                                                                   tag=all_manual_dcs_tags)
                    logger.info(f"{query_manual_dcs}")
                    date_output[current_date] = Kairos_query().get_data_from_kairos(query=query_manual_dcs,
                                                                                    tags_dict=tags_by_id,
                                                                                    date=current_date)
                except Exception:
                    # Record the failed date explicitly: downstream code detects
                    # unavailable days via `value is None`, not via a missing key.
                    date_output[current_date] = None
                    logger.exception("Exception occurred", exc_info=True)
            return date_output
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
\ No newline at end of file
import pandas as pd
from loguru import logger
class ReorderRename:
    """Reshape the flat per-day EBPR dataframe into the presentation layout.

    Input: one row per date (including the appended 'Total' / 'Average'
    rows), one column per report parameter, plus a 'Date' column.
    Output: one row per date with a two-level column index of
    (section caption, metric caption) and a blank spacer column after each
    material section (Beta Picolene, Ammonia, Benzene, Crude, Pure, Utility).
    """
    def __init__(self,df_ebpr):
        # df_ebpr: the concatenated manual/DCS + calculated dataframe.
        self.df_ebpr=df_ebpr
    def reorder_rename(self):
        """Return the regrouped and renamed report dataframe, or None on error."""
        try:
            # Select exactly the columns that appear in the report; the
            # commented-out entries are parameters currently excluded.
            df1 = self.df_ebpr[['Date',
                                'D1D001Readings_T_2703_A_DPR', 'D1D001Readings_T_2703_B_DPR',
                                'D1D001Readings_T_2101_A_DPR', 'D1D001Readings_T_2101_B_DPR', 'D1D001Readings_Conv_DPR',
                                'D1D001_consumptions_Opening_DPR', 'D1D001_consumptions_Day_Receipt_DPR',
                                'D1D001_consumptions_Day_Recovery_DPR',
                                'D1D001_consumptions_Total_Receipt_DPR', 'D1D001_consumptions_Day_Cons_DPR',
                                'D1D001_consumptions_Closing_DPR',
                                '7302011030_Consumptions_Opening_DPR', '7302011030_Consumptions_Day_Receipt_DPR',
                                '7302011030_Consumptions_Total_Receipt_DPR', '7302011030_Consumptions_Day_Cons_DPR',
                                '7302011030_Consumptions_Total_Cons_DPR', '7302011030_Consumptions_Closing_DPR',
                                '7302011061_Consumption_Opening_DPR', '7302011061_Consumption_Day_Receipt_DPR',
                                '7302011061_Consumption_Total_Receipt_DPR', '7302011061_Consumption_Day_Cons_DPR',
                                '7302011061_Consumption_Total_Cons_DPR', '7302011061_Consumption_Closing_DPR',
                                'Crude_Prod_Day_Prod_DPR', 'Crude_Prod_Total_Prod_DPR',
                                'Pure_Production_Opening_DPR', 'Pure_Production_Day_Prod_DPR',
                                'Pure_Production_Total_Prod_DPR',
                                'Pure_Production_Day_Nia_DPR', 'Pure_Production_Total_Nia_DPR',
                                'Pure_Production_Day_Drum_Filling_DPR',
                                'Pure_Production_Total_Drum_Filling_DPR',
                                'Pure_Production_LT_2701_A_DPR', 'Pure_Production_LT_2701_B_DPR',
                                'Pure_Production_Closing_of_Pure_Tanks_only_DPR',
                                'Pure_Production_Pure_tank_Dead_Volumes_DPR',
                                'Utility_report_Day_Power', 'Utility_report_Day_Steam',
                                'Utility_report_Day_Raffinate', 'Utility_report_Vent_Gas_Raffinate',
                                'Utility_report_Day_DM',
                                'Utility_report_Day_DM_norm',
                                'Utility_report_Day_Treated_Water',
                                'Utility_report_Raffinate_Incinerated',
                                # 'Utility_report_Compressor',
                                'Utility_report_Actual_Ammonia_Norms',
                                'Utility_report_Actual_Beta_Norms',
                                'Utility_report_Actual_Benzene_Norms',
                                # 'Utility_report_Actual_Air_Norms',
                                'Utility_report_Power_Norms',
                                'Utility_report_Steam_Norms', 'Utility_report_Raffinate_Norms',
                                'Utility_report_Raffinate_Vent_Gas', 'Utility_report_Raw_Water_Norms',
                                # 'Utility_report_Actual_Day_Prod',
                                'Utility_report_per_hr_burn_rate',
                                # 'Utility_report_Day_Air'
                                ]]
            # Transpose so parameters become rows; keep the parameter names in a
            # 'Parameter' column before replacing the index with 0..n-1.
            df1_trans = df1.transpose()
            df1_trans['Parameter'] = list(df1_trans.index)
            df1_trans.index = [i for i in range(df1_trans.shape[0])]
            # The first row (the original 'Date' column) becomes the header.
            # NOTE(review): wrapping iloc[0, :] in a list makes every column key
            # a 1-tuple, hence the ('Date',) / ('Parameter',) keys used below.
            df1_trans.columns = [df1_trans.iloc[0, :]]
            df1_trans = df1_trans.iloc[1:, :]
            # shift column 'Date' to first position
            first_column = df1_trans.pop(('Date',))
            # insert column using insert(position,column_name,first_column) function
            df1_trans.insert(0, ('Date',), first_column)
            # Section label per parameter row, in the exact order of the column
            # selection above: 11 + 6 + 6 + 2 + 11 + 17 = 53 entries.
            df1_trans['Parameter'] = ["BETAPICOLENE",
                                      "BETAPICOLENE",
                                      "BETAPICOLENE",
                                      "BETAPICOLENE",
                                      "BETAPICOLENE",
                                      "BETAPICOLENE",
                                      "BETAPICOLENE",
                                      "BETAPICOLENE",
                                      "BETAPICOLENE",
                                      "BETAPICOLENE",
                                      "BETAPICOLENE",
                                      "AMMONIA",
                                      "AMMONIA",
                                      "AMMONIA",
                                      "AMMONIA",
                                      "AMMONIA",
                                      "AMMONIA",
                                      "BENZENE",
                                      "BENZENE",
                                      "BENZENE",
                                      "BENZENE",
                                      "BENZENE",
                                      "BENZENE",
                                      "CRUDE",
                                      "CRUDE",
                                      "PURE PROD",
                                      "PURE PROD",
                                      "PURE PROD",
                                      "PURE PROD",
                                      "PURE PROD",
                                      "PURE PROD",
                                      "PURE PROD",
                                      "PURE PROD",
                                      "PURE PROD",
                                      "PURE PROD",
                                      "PURE PROD",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      "UTILITY",
                                      # "UTILITY",
                                      # "UTILITY",
                                      # "UTILITY"
                                      ]
            # shift column 'Name' to first position
            first_column = df1_trans.pop(('Parameter',))
            # insert column using insert(position,column_name,
            # first_column) function
            df1_trans.insert(0, ('Parameter',), first_column)
            # Flatten the 1-tuple column keys back to plain strings, then group
            # so each (section, parameter) pair holds one value per date.
            df1_trans.columns = [col[0] for col in df1_trans.columns]
            gg = df1_trans.groupby(['Parameter', 'Date'])
            gg1 = gg.first().transpose()
            # First selection: groupby sorts alphabetically, so pick the pairs
            # in that order to verify the full expected set exists.
            gg2 = gg1[[('BETAPICOLENE', 'D1D001Readings_Conv_DPR'),
                       ('BETAPICOLENE', 'D1D001Readings_T_2101_A_DPR'),
                       ('BETAPICOLENE', 'D1D001Readings_T_2101_B_DPR'),
                       ('BETAPICOLENE', 'D1D001Readings_T_2703_A_DPR'),
                       ('BETAPICOLENE', 'D1D001Readings_T_2703_B_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Closing_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Day_Cons_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Day_Receipt_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Day_Recovery_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Opening_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Total_Receipt_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Closing_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Day_Cons_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Day_Receipt_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Opening_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Total_Cons_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Total_Receipt_DPR'),
                       ('BENZENE', '7302011061_Consumption_Closing_DPR'),
                       ('BENZENE', '7302011061_Consumption_Day_Cons_DPR'),
                       ('BENZENE', '7302011061_Consumption_Day_Receipt_DPR'),
                       ('BENZENE', '7302011061_Consumption_Opening_DPR'),
                       ('BENZENE', '7302011061_Consumption_Total_Cons_DPR'),
                       ('BENZENE', '7302011061_Consumption_Total_Receipt_DPR'),
                       ('CRUDE', 'Crude_Prod_Day_Prod_DPR'),
                       ('CRUDE', 'Crude_Prod_Total_Prod_DPR'),
                       ('PURE PROD', 'Pure_Production_Closing_of_Pure_Tanks_only_DPR'),
                       ('PURE PROD', 'Pure_Production_Day_Drum_Filling_DPR'),
                       ('PURE PROD', 'Pure_Production_Day_Nia_DPR'),
                       ('PURE PROD', 'Pure_Production_Day_Prod_DPR'),
                       ('PURE PROD', 'Pure_Production_LT_2701_A_DPR'),
                       ('PURE PROD', 'Pure_Production_LT_2701_B_DPR'),
                       ('PURE PROD', 'Pure_Production_Opening_DPR'),
                       ('PURE PROD', 'Pure_Production_Pure_tank_Dead_Volumes_DPR'),
                       ('PURE PROD', 'Pure_Production_Total_Drum_Filling_DPR'),
                       ('PURE PROD', 'Pure_Production_Total_Nia_DPR'),
                       ('PURE PROD', 'Pure_Production_Total_Prod_DPR'),
                       # ('UTILITY', 'Utility_report_Actual_Air_Norms'),
                       ('UTILITY', 'Utility_report_Actual_Ammonia_Norms'),
                       ('UTILITY', 'Utility_report_Actual_Benzene_Norms'),
                       ('UTILITY', 'Utility_report_Actual_Beta_Norms'),
                       # ('UTILITY', 'Utility_report_Actual_Day_Prod'),
                       # ('UTILITY', 'Utility_report_Compressor'),
                       # ('UTILITY', 'Utility_report_Day_Air'),
                       ('UTILITY', 'Utility_report_Day_DM'),
                       ('UTILITY', 'Utility_report_Day_DM_norm'),
                       ('UTILITY', 'Utility_report_Day_Power'),
                       ('UTILITY', 'Utility_report_Day_Raffinate'),
                       ('UTILITY', 'Utility_report_Day_Steam'),
                       ('UTILITY', 'Utility_report_Day_Treated_Water'),
                       ('UTILITY', 'Utility_report_Power_Norms'),
                       ('UTILITY', 'Utility_report_Raffinate_Incinerated'),
                       ('UTILITY', 'Utility_report_Raffinate_Norms'),
                       ('UTILITY', 'Utility_report_Raffinate_Vent_Gas'),
                       ('UTILITY', 'Utility_report_Raw_Water_Norms'),
                       ('UTILITY', 'Utility_report_Steam_Norms'),
                       ('UTILITY', 'Utility_report_Vent_Gas_Raffinate'),
                       ('UTILITY', 'Utility_report_per_hr_burn_rate')]]
            # Second selection: reorder the same pairs into the display order
            # used by the final report.
            gg2 = gg2[[('BETAPICOLENE', 'D1D001Readings_T_2703_A_DPR'),
                       ('BETAPICOLENE', 'D1D001Readings_T_2703_B_DPR'),
                       ('BETAPICOLENE', 'D1D001Readings_T_2101_A_DPR'),
                       ('BETAPICOLENE', 'D1D001Readings_T_2101_B_DPR'),
                       ('BETAPICOLENE', 'D1D001Readings_Conv_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Opening_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Day_Receipt_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Day_Recovery_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Total_Receipt_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Day_Cons_DPR'),
                       ('BETAPICOLENE', 'D1D001_consumptions_Closing_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Opening_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Day_Receipt_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Total_Receipt_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Day_Cons_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Total_Cons_DPR'),
                       ('AMMONIA', '7302011030_Consumptions_Closing_DPR'),
                       ('BENZENE', '7302011061_Consumption_Opening_DPR'),
                       ('BENZENE', '7302011061_Consumption_Day_Receipt_DPR'),
                       ('BENZENE', '7302011061_Consumption_Total_Receipt_DPR'),
                       ('BENZENE', '7302011061_Consumption_Day_Cons_DPR'),
                       ('BENZENE', '7302011061_Consumption_Total_Cons_DPR'),
                       ('BENZENE', '7302011061_Consumption_Closing_DPR'),
                       ('CRUDE', 'Crude_Prod_Day_Prod_DPR'),
                       ('CRUDE', 'Crude_Prod_Total_Prod_DPR'),
                       ('PURE PROD', 'Pure_Production_Opening_DPR'),
                       ('PURE PROD', 'Pure_Production_Day_Prod_DPR'),
                       ('PURE PROD', 'Pure_Production_Total_Prod_DPR'),
                       ('PURE PROD', 'Pure_Production_Day_Nia_DPR'),
                       ('PURE PROD', 'Pure_Production_Total_Nia_DPR'),
                       ('PURE PROD', 'Pure_Production_Day_Drum_Filling_DPR'),
                       ('PURE PROD', 'Pure_Production_Total_Drum_Filling_DPR'),
                       ('PURE PROD', 'Pure_Production_LT_2701_A_DPR'),
                       ('PURE PROD', 'Pure_Production_LT_2701_B_DPR'),
                       ('PURE PROD', 'Pure_Production_Closing_of_Pure_Tanks_only_DPR'),
                       ('PURE PROD', 'Pure_Production_Pure_tank_Dead_Volumes_DPR'),
                       ('UTILITY', 'Utility_report_Power_Norms'),
                       ('UTILITY', 'Utility_report_Steam_Norms'),
                       ('UTILITY', 'Utility_report_Raffinate_Norms'),
                       ('UTILITY', 'Utility_report_Raffinate_Vent_Gas'),
                       ('UTILITY', 'Utility_report_Raw_Water_Norms'),
                       ('UTILITY', 'Utility_report_Day_Power'),
                       ('UTILITY', 'Utility_report_Day_Steam'),
                       ('UTILITY', 'Utility_report_Day_Raffinate'),
                       ('UTILITY', 'Utility_report_Vent_Gas_Raffinate'),
                       ('UTILITY', 'Utility_report_Day_DM'),
                       ('UTILITY', 'Utility_report_Day_DM_norm'),
                       ('UTILITY', 'Utility_report_Day_Treated_Water'),
                       ('UTILITY', 'Utility_report_Raffinate_Incinerated'),
                       ('UTILITY', 'Utility_report_per_hr_burn_rate'),
                       # ('UTILITY', 'Utility_report_Actual_Air_Norms'),
                       ('UTILITY', 'Utility_report_Actual_Ammonia_Norms'),
                       ('UTILITY', 'Utility_report_Actual_Benzene_Norms'),
                       ('UTILITY', 'Utility_report_Actual_Beta_Norms'),
                       # ('UTILITY', 'Utility_report_Actual_Day_Prod'),
                       # ('UTILITY', 'Utility_report_Compressor'),
                       # ('UTILITY', 'Utility_report_Day_Air'),
                       ]]
            # (internal tag-name pair) -> (display section, display caption)
            mapping_dict = {('BETAPICOLENE', 'D1D001Readings_T_2703_A_DPR'): ('D1D001', 'T-2703 A (%)'),
                            ('BETAPICOLENE', 'D1D001Readings_T_2703_B_DPR'): ('D1D001', 'T-2703 B (%)'),
                            ('BETAPICOLENE', 'D1D001Readings_T_2101_A_DPR'): ('D1D001', 'T-2101 A (%)'),
                            ('BETAPICOLENE', 'D1D001Readings_T_2101_B_DPR'): ('D1D001', 'T-2101 B (%)'),
                            ('BETAPICOLENE', 'D1D001Readings_Conv_DPR'): ('D1D001', 'Conversion (%)'),
                            ('BETAPICOLENE', 'D1D001_consumptions_Opening_DPR'): ('D1D001', 'Opening Stock (MT)'),
                            ('BETAPICOLENE', 'D1D001_consumptions_Day_Receipt_DPR'): ('D1D001', 'Day Receipt (MT)'),
                            ('BETAPICOLENE', 'D1D001_consumptions_Day_Recovery_DPR'): ('D1D001', 'Day Recovery (MT)'),
                            ('BETAPICOLENE', 'D1D001_consumptions_Total_Receipt_DPR'): ('D1D001', 'Total Receipt (MT)'),
                            ('BETAPICOLENE', 'D1D001_consumptions_Day_Cons_DPR'): ('D1D001', 'Day Consumption (MT)'),
                            ('BETAPICOLENE', 'D1D001_consumptions_Closing_DPR'): ('D1D001', 'Closing Stock (MT)'),
                            ('AMMONIA', '7302011030_Consumptions_Opening_DPR'): (
                                '7302011030 Consumption', 'Opening Stock (MT)'),
                            ('AMMONIA', '7302011030_Consumptions_Day_Receipt_DPR'): (
                                '7302011030 Consumption', 'Day Receipt (MT)'),
                            ('AMMONIA', '7302011030_Consumptions_Total_Receipt_DPR'): (
                                '7302011030 Consumption', 'Total Receipt (MT)'),
                            ('AMMONIA', '7302011030_Consumptions_Day_Cons_DPR'): (
                                '7302011030 Consumption', 'Day Consumption (MT)'),
                            ('AMMONIA', '7302011030_Consumptions_Total_Cons_DPR'): (
                                '7302011030 Consumption', 'Total Consumption (MT)'),
                            ('AMMONIA', '7302011030_Consumptions_Closing_DPR'): (
                                '7302011030 Consumption', 'Closing Stock (MT)'),
                            ('BENZENE', '7302011061_Consumption_Opening_DPR'): (
                                '7302011061 Consumption', 'Opening Stock (MT)'),
                            ('BENZENE', '7302011061_Consumption_Day_Receipt_DPR'): (
                                '7302011061 Consumption', 'Day Receipt (MT)'),
                            ('BENZENE', '7302011061_Consumption_Total_Receipt_DPR'): (
                                '7302011061 Consumption', 'Total Receipt (MT)'),
                            ('BENZENE', '7302011061_Consumption_Day_Cons_DPR'): (
                                '7302011061 Consumption', 'Day Consumption (MT)'),
                            ('BENZENE', '7302011061_Consumption_Total_Cons_DPR'): (
                                '7302011061 Consumption', 'Total Consumption (MT)'),
                            ('BENZENE', '7302011061_Consumption_Closing_DPR'): (
                                '7302011061 Consumption', 'Closing Stock (MT)'),
                            ('CRUDE', 'Crude_Prod_Day_Prod_DPR'): ('Crude 3-CP Production', 'Day Production (MT)'),
                            ('CRUDE', 'Crude_Prod_Total_Prod_DPR'): ('Crude 3-CP Production', 'Total Production (MT)'),
                            ('PURE PROD', 'Pure_Production_Opening_DPR'): ('Pure 3-CP Production', 'Opening Stock (MT)'),
                            ('PURE PROD', 'Pure_Production_Day_Prod_DPR'): ('Pure 3-CP Production', 'Day Production (MT)'),
                            ('PURE PROD', 'Pure_Production_Total_Prod_DPR'): (
                                'Pure 3-CP Production', 'Total Production (MT)'),
                            ('PURE PROD', 'Pure_Production_Day_Nia_DPR'): (
                                'Pure 3-CP Production', 'Day NIA. Transfer (MT)'),
                            ('PURE PROD', 'Pure_Production_Total_Nia_DPR'): (
                                'Pure 3-CP Production', 'Total NIA. Transfer (MT)'),
                            ('PURE PROD', 'Pure_Production_Day_Drum_Filling_DPR'): (
                                'Pure 3-CP Production', 'Day Drum Filling (MT)'),
                            ('PURE PROD', 'Pure_Production_Total_Drum_Filling_DPR'): (
                                'Pure 3-CP Production', 'Total Drum Filling (MT)'),
                            ('PURE PROD', 'Pure_Production_LT_2701_A_DPR'): ('Pure 3-CP Production', 'LT-2701 A (%)'),
                            ('PURE PROD', 'Pure_Production_LT_2701_B_DPR'): ('Pure 3-CP Production', 'LT T-2701 B(%)'),
                            ('PURE PROD', 'Pure_Production_Closing_of_Pure_Tanks_only_DPR'): (
                                'Pure 3-CP Production', 'Closing Stock of Pure Tanks (MT)'),
                            ('PURE PROD', 'Pure_Production_Pure_tank_Dead_Volumes_DPR'): (
                                'Pure 3-CP Production', 'Pure tank Dead Volume (MT)'),
                            ('UTILITY', 'Utility_report_Day_Power'): ('UTILITY', 'Day Power Consumption (KWH)'),
                            ('UTILITY', 'Utility_report_Power_Norms'): ('UTILITY', 'Power Norm (KWH/MT)'),
                            ('UTILITY', 'Utility_report_Day_Steam'): ('UTILITY', 'Day Steam Consumption (MT)'),
                            ('UTILITY', 'Utility_report_Steam_Norms'): ('UTILITY', 'Steam Norm (MT/MT)'),
                            ('UTILITY', 'Utility_report_Day_Raffinate'): ('UTILITY', 'Day Raffinate Generation (M3)'),
                            ('UTILITY', 'Utility_report_Raffinate_Norms'): ('UTILITY', 'Raffinate Norm (M3/MT)'),
                            ('UTILITY', 'Utility_report_Vent_Gas_Raffinate'): (
                                'UTILITY', 'Day Vent Gas Raffinate Generation (M3)'),
                            ('UTILITY', 'Utility_report_Raffinate_Vent_Gas'): ('UTILITY', 'Raffinate Vent Gas Norm (M3/MT)'),
                            ('UTILITY', 'Utility_report_Day_Treated_Water'): (
                                'UTILITY', 'Day Treated Water Consumption (M3)'),
                            ('UTILITY', 'Utility_report_Raw_Water_Norms'): ('UTILITY', 'Treated Water Norm (M3/MT)'),
                            ('UTILITY', 'Utility_report_Day_DM'): ('UTILITY', 'Day D.M.W. Consumption (M3)'),
                            ('UTILITY', 'Utility_report_Day_DM_norm'): ('UTILITY', 'D.M.W. Norm (M3/MT)'),
                            ('UTILITY', 'Utility_report_Raffinate_Incinerated'): (
                                'UTILITY', 'Day Raffinate Incinerated (M3)'),
                            ('UTILITY', 'Utility_report_per_hr_burn_rate'): (
                                'UTILITY', 'Raffinate Incineration Rate (Kg/Hr)'),
                            # ('UTILITY', 'Utility_report_Actual_Air_Norms'): ('UTILITY', 'Actual_Air_Norm'),
                            ('UTILITY', 'Utility_report_Actual_Ammonia_Norms'): ('UTILITY', 'Ammonia Norm (MT/MT)'),
                            ('UTILITY', 'Utility_report_Actual_Benzene_Norms'): ('UTILITY', 'Benzene Norm (MT/MT)'),
                            ('UTILITY', 'Utility_report_Actual_Beta_Norms'): ('UTILITY', 'Beta Picoline Norm (MT/MT)'),
                            # ('UTILITY', 'Utility_report_Actual_Day_Prod'): ('UTILITY', 'Actual Day Prod. (MT)'),
                            # ('UTILITY', 'Utility_report_Compressor'): ('UTILITY', 'Compressor'),
                            # ('UTILITY', 'Utility_report_Day_Air'): ('UTILITY', 'Day Air')
                            }
            # Dictionary allows using tuples as keys and values
            def rename_tuple(tuple_, dict_):
                """Replaces tuple if present in tuple dict"""
                if tuple_ in dict_.keys():
                    return dict_[tuple_]
                return tuple_
            # Rename chosen elements from list of tuples from df.columns
            altered_index_list = [rename_tuple(tuple_, mapping_dict) for tuple_ in gg2.columns.to_list()]
            # Update columns with new renamed columns
            gg2.columns = pd.Index(altered_index_list)
            # Slice out each material section (positions match the display
            # order above) and append a blank spacer column to each.
            gg_BETAPICOLENE = gg2.iloc[:, :11]
            gg_BETAPICOLENE.insert(gg_BETAPICOLENE.shape[1], " ", " ")
            gg_AMMONIA = gg2.iloc[:, 11:17]
            gg_AMMONIA.insert(gg_AMMONIA.shape[1], " ", " ")
            gg_BEN = gg2.iloc[:, 17:23]
            gg_BEN.insert(gg_BEN.shape[1], " ", " ")
            gg_CRUDE = gg2.iloc[:, 23:25]
            gg_CRUDE.insert(gg_CRUDE.shape[1], " ", " ")
            gg_PURE = gg2.iloc[:, 25:36]
            gg_PURE.insert(gg_PURE.shape[1], " ", " ")
            gg_UTILITY = gg2.iloc[:, 36:53]
            gg_UTILITY.insert(gg_UTILITY.shape[1], " ", " ")
            # Stitch the sections back together side by side.
            df_concat = pd.concat([gg_BETAPICOLENE, gg_AMMONIA, gg_BEN, gg_CRUDE, gg_PURE, gg_UTILITY], axis=1,
                                  ignore_index=False)
            df_concat.columns = df_concat.columns.rename("Date", level=1)
            return df_concat
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
\ No newline at end of file
import numpy as np
import warnings
warnings.filterwarnings("ignore")
import traceback
from loguru import logger
import pandas as pd
from datetime import datetime, timedelta
from scripts.utils.yield_sheet_3cp_utils.all_tags_3cp import TagsDict
enable_traceback = True
from scripts.utils.yield_sheet_3cp_utils.manual_dcs_dataframe import ManualDcsData
from scripts.utils.yield_sheet_3cp_utils.unavailable_manual_dcs_tags import UnavailableManualDcsTags
from scripts.utils.yield_sheet_3cp_utils.ebpr_manual_dataframe import EbprManualDcsDataframe
from scripts.utils.yield_sheet_3cp_utils.calculated_dataframe import CalculatedDataframe
from scripts.utils.yield_sheet_3cp_utils.concat_manual_calculated_dataframe import ConcatenateManualCalculatedDf
from scripts.utils.yield_sheet_3cp_utils.reorder_renaming_ebpr import ReorderRename
class ReportGenerator:
    """Drives the 3-CP daily yield report build for a date window.

    Pipeline: pull manual/DCS data per day -> validate availability ->
    build the manual/DCS and calculated dataframes -> concatenate ->
    append 'Total' / 'Average' summary rows -> reorder/rename into the
    final presentation layout.
    """

    # Per-unit-of-production norm columns: averaged (not summed) in the
    # summary rows; every other value column is summed.
    NORM_COLUMNS = ('Utility_report_Power_Norms',
                    'Utility_report_Steam_Norms',
                    'Utility_report_Raffinate_Norms',
                    'Utility_report_Raffinate_Vent_Gas',
                    'Utility_report_Raw_Water_Norms',
                    'Utility_report_Actual_Ammonia_Norms',
                    'Utility_report_Actual_Beta_Norms',
                    'Utility_report_Day_DM_norm',
                    'Utility_report_Actual_Benzene_Norms')

    def __init__(self, tags_cal, tags_cal_prev, tags_manual, tags_dcs, start_date, end_date):
        self.tags_cal = tags_cal
        self.tags_cal_prev = tags_cal_prev
        self.tags_manual = tags_manual
        self.tags_dcs = tags_dcs
        # Pull one extra look-back day so "calculated_depends_previous_value"
        # tags have an opening value for the first reported day.
        self.start_date = start_date - timedelta(days=1)
        self.end_date = end_date

    def yield_report_3cp(self):
        """Build the report.

        Returns:
            (dataframe, "Report is ready") on success;
            (None, reason) when data is missing or an error occurs.
        """
        try:
            date_output = ManualDcsData(start_date=self.start_date, end_date=self.end_date, tags_cal=self.tags_cal,
                                        tags_cal_prev=self.tags_cal_prev, tags_manual=self.tags_manual,
                                        tags_dcs=self.tags_dcs).manual_dcs_dataframe()
            all_manual_dcs_tags_dict, all_cal_tags_dict = TagsDict().all_tags(self.tags_cal, self.tags_cal_prev,
                                                                              self.tags_manual, self.tags_dcs)
            all_manual_dcs_tags = list(all_manual_dcs_tags_dict.values())
            all_calculated_tags = list(all_cal_tags_dict.values())
            unavailable_tags, unavailable_data_date = UnavailableManualDcsTags(
                date_output=date_output,
                all_manual_dcs_tags=all_manual_dcs_tags,
                all_manual_dcs_tags_dict=all_manual_dcs_tags_dict,
                first_date=self.start_date).unavailable_manual_dcs_tags()
            # Guard clause: whole days with no data at all.
            if len(unavailable_data_date) > 0:
                logger.info(f"No data is available for - {unavailable_data_date}")
                return None, f"No data is available for - {unavailable_data_date}"
            logger.info(f"Given tags are missing for dates - {unavailable_tags}")
            # Gaps on the look-back day are tolerated; it only feeds opening values.
            unavailable_tags.pop(self.start_date, None)
            if len(unavailable_tags) >= 1:
                logger.info(f"Given tags are missing for Date - {unavailable_tags}")
                return None, f"Given tags are missing for Date - {unavailable_tags}"
            df_manual_dcs, df_manual_dcs_ebpr, date_output_filtered = EbprManualDcsDataframe(
                date_output=date_output,
                all_manual_dcs_tags=all_manual_dcs_tags).ebpr_manual_dcs_dataframe()
            df_calculated_ebpr, df_calculated, unavailable_tags_cal = CalculatedDataframe(
                date_output_filtered=date_output_filtered,
                all_calculated_tags=all_calculated_tags,
                all_cal_tags_dict=all_cal_tags_dict,
                df_manual_dcs=df_manual_dcs).calculated_df()
            df_ebpr = ConcatenateManualCalculatedDf(df_manual_dcs_ebpr=df_manual_dcs_ebpr,
                                                    df_calculated_ebpr=df_calculated_ebpr,
                                                    start_date=self.start_date).concate_manual_calculated_df()
            # Division-by-zero artefacts become NaN before summarising.
            df_ebpr.replace([np.inf, -np.inf], np.nan, inplace=True)
            # 'Total' row: sum every value column except the norms.
            for col in [c for c in df_ebpr.columns if c != 'Date' and c not in self.NORM_COLUMNS]:
                df_ebpr.loc['Total', col] = df_ebpr[col].sum()
            # .loc replaces the original chained assignment df['Date'][-1] = ...,
            # which is SettingWithCopy-prone and whose positional fallback is
            # deprecated/removed in modern pandas.
            df_ebpr.loc['Total', 'Date'] = 'Total'
            # 'Average' row: mean of the norm columns; the NaNs left in the
            # freshly added 'Total' row are skipped by .mean().
            for col in [c for c in df_ebpr.columns if c in self.NORM_COLUMNS]:
                df_ebpr.loc['Average', col] = df_ebpr[col].mean()
            df_ebpr.loc['Average', 'Date'] = 'Average'
            df_ebpr = df_ebpr.round(3)
            df_concat = ReorderRename(df_ebpr=df_ebpr).reorder_rename()
            df_concat.replace({'inf': 'nan'}, inplace=True)
            return df_concat, "Report is ready"
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
            return None, f"Error - {e}"
from loguru import logger
class UnavailableManualDcsTags:
    """Identifies gaps in the pulled manual/DCS data: (a) dates whose entire
    pull failed (value is None) and (b) dates where individual tags are
    missing from the pulled data.
    """

    def __init__(self, date_output, all_manual_dcs_tags, all_manual_dcs_tags_dict, first_date):
        # date_output: {date: pulled tag data dict, or None when the pull failed}
        self.date_output = date_output
        # Expected tag ids / {tag name: tag id} for the manual+DCS set.
        self.all_manual_dcs_tags = all_manual_dcs_tags
        self.all_manual_dcs_tags_dict = all_manual_dcs_tags_dict
        # The extra look-back day; its gaps are tolerated.
        self.first_date = first_date

    def unavailable_manual_dcs_tags(self):
        """Return (unavailable_tags, unavailable_data_date).

        unavailable_tags: {date: [missing tag names]} for dates with data.
        unavailable_data_date: {date: None} for dates with no data at all
        (the look-back day and the first day that has data are excused).
        Returns None (implicitly) only on an unexpected error, matching the
        original behaviour so callers' try/except handling is unchanged.
        """
        try:
            unavailable_tags = {}
            unavailable_data_date = {k: v for k, v in self.date_output.items() if v is None}
            date_output_without_none = {k: v for k, v in self.date_output.items() if v is not None}
            logger.info(f"No data is available for - {unavailable_data_date}")
            # If every single date failed, report them all; the original
            # [0]-indexing below would raise IndexError here and the blanket
            # except would make the caller crash on tuple unpacking.
            if not date_output_without_none:
                return unavailable_tags, unavailable_data_date
            first_data_date = next(iter(date_output_without_none))
            unavailable_data_date.pop(self.first_date, None)
            unavailable_data_date.pop(first_data_date, None)
            if len(unavailable_data_date) > 0:
                return unavailable_tags, unavailable_data_date
            date_output = self.date_output
            # Drop the look-back day when it has no data at all (its opening
            # values simply won't be available). Sentinel False distinguishes
            # "missing key" from "present but None".
            if date_output.get(self.first_date, False) is None:
                date_output.pop(self.first_date, None)
            # Per remaining date, list the expected tag names absent from the pull.
            for day, data in date_output.items():
                if len(data) != len(self.all_manual_dcs_tags):
                    unavailable_tags[day] = [tag for tag in self.all_manual_dcs_tags_dict
                                             if tag not in data]
            return unavailable_tags, unavailable_data_date
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
\ No newline at end of file
File added
File added
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment