Commit 6bc89d6c authored by aakash.bedi's avatar aakash.bedi

test

parent 33e97bf0
if __name__ == "__main__":
from dotenv import load_dotenv
load_dotenv(dotenv_path='config.env')
import pandas as pd
import xlsxwriter
import os
import yaml
from datetime import datetime
import pytz
from loguru import logger
from scripts.utils.yield_sheet_3cp_utils.all_tags_3cp import AllTags
from scripts.utils.yield_sheet_3cp_utils.report_generator_3cp import ReportGenerator
# __kwargs__.update(
# start_date=input_data.property.get(
# "start_date",
# datetime.now(tz=pytz.timezone(input_data.tz)).strftime("%Y-%m-%d"),
#
# ),
# end_date=input_data.property.get(
# "end_date",
# datetime.now(tz=pytz.timezone(input_data.tz)).strftime("%Y-%m-%d"))
# )
# try:
# logger.info("Calculating Report for 3CP")
# start_date = str(__kwargs__.get('start_date'))
# end_date = str(__kwargs__.get('end_date'))
yml_file_path = "scripts/utils/yield_sheet_3cp_utils/"
try:
with open(os.path.join(yml_file_path, 'engine.yml'), 'r') as engine_yml_file:
config_engine = yaml.full_load(engine_yml_file)
except Exception as e:
logger.exception(f"Exception occurred - {e}", exc_info=True)
all_tags = config_engine["tag_heirarcy"]
tags_cal, tags_cal_prev, tags_manual, tags_dcs = AllTags().get_tags(all_tags_dictionary=all_tags)
# start_date = datetime.strptime(start_date, '%Y-%m-%d')
# start_date = start_date.astimezone(pytz.UTC)
start_date = datetime.strptime("2022-12-25", '%Y-%m-%d').replace(hour=5, minute=0, second=0,
microsecond=0)
start_date = start_date.astimezone(pytz.timezone("Asia/Kolkata")).replace(hour=5, minute=0, second=0,
microsecond=0)
# end_date = datetime.strptime(end_date, '%Y-%m-%d')
# end_date = end_date.astimezone(pytz.UTC)
end_date = datetime.strptime("2022-12-26", '%Y-%m-%d').replace(hour=5, minute=0, second=0,
microsecond=0)
end_date = end_date.astimezone(pytz.timezone("Asia/Kolkata")).replace(hour=5, minute=0, second=0,
microsecond=0)
df, message = ReportGenerator(tags_cal=tags_cal, tags_cal_prev=tags_cal_prev, tags_manual=tags_manual,
tags_dcs=tags_dcs, start_date=start_date,
end_date=end_date).yield_report_3cp()
logger.debug(f'{df.shape}')
logger.debug(f'{message}')
master_output_file = 'test_prod.xlsx'
writer = pd.ExcelWriter(master_output_file, engine='xlsxwriter')
df.to_excel(writer, sheet_name="DPR Sheet", index=True)
workbook = writer.book
format = workbook.add_format(
{'font_name': 'Trebuchet MS', 'text_wrap': True, 'bold': 2, 'font_color': "blue"})
format.set_align('center')
format.set_align('vcenter')
format1 = workbook.add_format({'font_name': 'Trebuchet MS', 'text_wrap': True})
format1.set_align('center')
format1.set_align('vcenter')
header_footer_format = workbook.add_format({
'text_wrap': True
})
no_of_rows = df.shape[0]
worksheet = writer.sheets["DPR Sheet"]
# set the column width as per your requirement
worksheet.set_column('A:F', 15, format)
worksheet.set_column('G:L', 20, format)
worksheet.set_column('N:T', 22, format)
worksheet.set_column('U:Z', 22, format)
worksheet.set_column('AB:AD', 20, format)
worksheet.set_column('AE:AM', 22, format)
worksheet.set_column('AN:AO', 29, format)
worksheet.set_column('AQ:AU', 27, format)
worksheet.set_column('AV:BC', 33, format)
worksheet.set_column('BD:BD', 30, format)
worksheet.set_column('BE:BH', 27, format)
worksheet.set_column('AY:AY', 40, format)
worksheet.set_column('BA:BA', 35, format)
worksheet.set_column('M:M', 10, format)
worksheet.set_column('T:T', 10, format)
worksheet.set_column('AD:AD', 10, format)
worksheet.set_column('AP:AP', 10, format)
worksheet.set_column('BK:BK', 10, format)
worksheet.set_column('AA:AA', 10, format)
worksheet.set_column('AT:AT', 30, format)
worksheet.set_column('BA:BA', 25, format)
format4 = workbook.add_format({'bg_color': 'yellow'})
format5 = workbook.add_format({'text_wrap': True})
worksheet.set_row(0, 28, format5)
worksheet.conditional_format(f'A{no_of_rows + 2}:AP{no_of_rows + 2}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'AV{no_of_rows + 2}:AZ{no_of_rows + 2}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BB{no_of_rows + 2}:BD{no_of_rows + 2}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
# worksheet.conditional_format(f'BF{no_of_rows + 2}:BF{no_of_rows + 2}',
# {'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'AQ{no_of_rows + 3}:AU{no_of_rows + 3}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BA{no_of_rows + 3}:BA{no_of_rows + 3}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BE{no_of_rows + 3}:BG{no_of_rows + 3}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
writer.save()
if not master_output_file.endswith(".xlsx"):
master_output_file = master_output_file + ".xlsx"
logger.info("XLSX is getting stitched")
[KAIROS_DB]
uri = $KAIROS_URI
KAIROS_URI= https://iLens:iLensJUB$456@jub-kairos.ilens.io/kairos
if __name__ == "__main__":
from dotenv import load_dotenv
load_dotenv(dotenv_path='config.env')
import datetime
import os
import os.path
import sys
from configparser import ConfigParser, BasicInterpolation
import yaml
from loguru import logger
# Configuring file constants
data_conf = "./conf/data.yml"
engine_conf = "./conf/engine.yml"
class EnvInterpolation(BasicInterpolation):
    """
    Interpolation which expands environment variables in values.

    A value such as ``$KAIROS_URI`` is replaced with the content of the
    corresponding environment variable.  If the variable is not set,
    ``os.path.expandvars`` leaves the ``$`` reference untouched, and this
    class then returns ``None`` instead of leaking the raw placeholder.
    """

    def before_get(self, parser, section, option, value, defaults):
        # Let BasicInterpolation resolve %(...)s references first.
        value = super().before_get(parser, section, option, value, defaults)
        # Expand once and reuse the result (the original expanded twice).
        expanded = os.path.expandvars(value)
        # A leading "$" means the environment variable was not set.
        return None if expanded.startswith("$") else expanded
try:
    # application.conf values may reference environment variables; the
    # custom interpolation expands them at read time.
    config = ConfigParser(interpolation=EnvInterpolation())
    config.read(f"conf/application.conf")
except Exception as e:
    # Configuration is mandatory: bail out of the process entirely.
    print(f"Error while loading the config: {e}")
    print("Failed to Load Configuration. Exiting!!!")
    sys.exit()
class DB:
    # Namespace holding database settings resolved from conf/application.conf
    # (values come from environment variables via EnvInterpolation).
    class KairosDb:
        # KairosDB base URI, e.g. "https://user:pass@host/kairos".
        uri = config["KAIROS_DB"]["uri"]
{
"_id" : "62a7225ab76af54830e2074d",
"id" : "yield_report_3cp",
"name" : "Yield Report 3cp",
"template_type" : "yield_report_3cp",
"project_id" : "project_107",
"associate_hierarchy" : false,
"deleteList" : [],
"description" : "Yield Report 3cp",
"sequence" : [],
"siteHierarchy" : null,
"siteHierarchyLevel" : "",
"template_conf" : {},
"isPublished" : true,
"uploadType" : "excel",
"meta" : {
"created_on" : 1638421200000,
"created_by" : "user_335"
},
"info" : {
"logbooks" : [
"logbook_152"
],
"download_formats" : [
"excel"
],
"download_properties" : [
{
"key" : "dateRange",
"label" : "Date Range",
"start_date" : null,
"end_date" : null,
"source" : "ebpr",
"logbook_id" : "logbook_152"
}
]
}
}
\ No newline at end of file
from loguru import logger
class TagsDict:
    """Helpers for combining the per-source tag dictionaries."""

    def __init__(self):
        pass

    def Merge(self, dict1, dict2):
        """Return a new dict combining both; keys in dict2 win on collision."""
        return {**dict1, **dict2}

    def all_tags(self, tags_cal, tags_cal_prev, tags_manual, tags_dcs):
        """Return (manual+dcs, calculated+calculated_prev) merged tag dicts.

        Returns None on failure (callers unpack the result, so a failure
        here surfaces as a TypeError at the call site).
        """
        try:
            manual_dcs_tags = self.Merge(dict1=tags_manual, dict2=tags_dcs)
            cal_calprev_tags = self.Merge(dict1=tags_cal, dict2=tags_cal_prev)
            logger.info(f"No of combined Manual & DCS tags = {len(manual_dcs_tags)} \
                No of Manual tags only = {len(tags_manual)} \
                No of DCS tags only = {len(tags_dcs)}")
            logger.info(f"No of combined Cal & Cal prev tags = {len(cal_calprev_tags)} \
                No of Cal tags only = {len(tags_cal)} \
                No of Cal prev tags only = {len(tags_cal_prev)}")
            return manual_dcs_tags, cal_calprev_tags
        except Exception as e:
            # Consistent with the rest of the module: include the error text.
            logger.exception(f"Exception occurred - {e}", exc_info=True)
class AllTags:
    """Splits the full tag hierarchy into per-source tag dictionaries."""

    def __init__(self):
        pass

    def get_tags(self, all_tags_dictionary):
        """Partition the hierarchy by data source.

        :param all_tags_dictionary: {tag_name: {"column_tag": ..., "source": ...}}
        :return: (tags_cal, tags_cal_prev, tags_manual, tags_dcs) as
            {tag_name: column_tag} dicts, or None on failure.
        """
        try:
            def _by_source(source):
                # Keep only column_tag/source for tags of the given source.
                # (The original re-indexed the dict for every field; use the
                # item value directly.)
                return {tag: {"column_tag": info["column_tag"], "source": info["source"]}
                        for tag, info in all_tags_dictionary.items()
                        if info["source"] == source}

            cal_tags = _by_source('calculated')
            cal_prev_tags = _by_source('calculated_depends_previous_value')
            manual_tags = _by_source('manual')
            dcs_tags = _by_source('dcs')
            return self.tags_category(cal_tags, cal_prev_tags, manual_tags, dcs_tags)
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)

    def tags_category(self, cal_tags, cal_prev_tags, manual_tags, dcs_tags):
        """Flatten each per-source dict down to {tag_name: column_tag}."""
        try:
            tags_cal = {tag: info["column_tag"] for tag, info in cal_tags.items()}
            tags_cal_prev = {tag: info["column_tag"] for tag, info in cal_prev_tags.items()}
            tags_manual = {tag: info["column_tag"] for tag, info in manual_tags.items()}
            tags_dcs = {tag: info["column_tag"] for tag, info in dcs_tags.items()}
            return tags_cal, tags_cal_prev, tags_manual, tags_dcs
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
import pandas as pd
from datetime import datetime, timedelta
from loguru import logger
from scripts.utils.yield_sheet_3cp_utils.data_puller_3cp import Kairos_query
from scripts.utils.yield_sheet_3cp_utils.data_calculator import CalculationCalculatedColumns
class CalculatedDataframe:
    """
    Builds the dataframe of 'calculated' tag values (queried only for the
    first report day) and derives the calculated report columns from it
    together with the manual/DCS dataframe.
    """
    def __init__(self, date_output_filtered, all_calculated_tags, all_cal_tags_dict, df_manual_dcs):
        # {date -> {tag: value}} of manual/DCS data, already filtered upstream
        self.date_output_filtered=date_output_filtered
        # list of calculated tag ids to query from Kairos
        self.all_calculated_tags=all_calculated_tags
        # {tag_name -> tag_id} for the calculated tags
        self.all_cal_tags_dict=all_cal_tags_dict
        # manual/DCS dataframe the calculations are based on
        self.df_manual_dcs=df_manual_dcs

    def unavailable_cal_tags(self, date_output_calculated):
        """Return {date: [calculated tag names missing from that day's data]}."""
        try:
            unavailable_tags_cal = {}
            for k, v in date_output_calculated.items():
                # A short result dict means some tags returned no data.
                if len(v) != len(self.all_calculated_tags):
                    required_tags_cal = [tags for tags in list(self.all_cal_tags_dict.keys()) if
                                         tags not in list(v.keys())]
                    unavailable_tags_cal[k] = required_tags_cal
            return unavailable_tags_cal
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)

    def calculated_df(self):
        """Query the first day's calculated tags, seed a frame sized like the
        manual/DCS frame, and run the derived-column calculations.

        :return: (df_calculated_ebpr, df_calculated, unavailable_tags_cal),
            or None on failure (callers unpack — failure raises TypeError).
        """
        try:
            date_output_calculated = {}
            # Calculated tags are queried only for the first (look-back) day;
            # start and end of the query window are the same instant —
            # presumably relying on the 'last' aggregator, confirm.
            start_date_calculated_tags = list(self.date_output_filtered.keys())[0]
            end_date_calculated_tags = start_date_calculated_tags
            periodic_start_time_calculated_tags = int(start_date_calculated_tags.timestamp()) * 1000
            periodic_end_time_calculated_tags = int((end_date_calculated_tags).timestamp()) * 1000
            query_calculated = Kairos_query().kairos_query(start=periodic_start_time_calculated_tags,
                                                           end=periodic_end_time_calculated_tags,
                                                           tag=self.all_calculated_tags)
            # logger.info(f"Query = {query_calculated}")
            # Invert {name -> id} to {id -> name} so results map back to names.
            data_calculated = Kairos_query().get_data_from_kairos(query=query_calculated, tags_dict=dict(
                (v, k) for k, v in self.all_cal_tags_dict.items()), date=start_date_calculated_tags)
            # logger.debug(f"Data of all calculated tags for 1st day = {data_calculated}")
            date_output_calculated[start_date_calculated_tags] = data_calculated
            unavailable_tags_cal = self.unavailable_cal_tags(date_output_calculated)
            logger.info(f"unavailable tags calculated = {unavailable_tags_cal}")
            columns_tags_calculated = list(list(date_output_calculated.values())[0].keys())
            value_list_calculated = [list(dic.values()) for dic in list(date_output_calculated.values())]
            value_list_calculated_flat = [item for sublist in value_list_calculated for item in sublist]
            # Frame sized like the manual/DCS frame; only row 0 holds the
            # queried values, the remaining rows are filled by the
            # calculation step below.
            df_calculated = pd.DataFrame(index=[no for no in range(self.df_manual_dcs.shape[0])],
                                         columns=columns_tags_calculated)
            df_calculated.iloc[0, :] = value_list_calculated_flat
            logger.debug(f"Dataframe shape of Calculated tags = {df_calculated.shape}")
            logger.debug(f"{df_calculated}")
            logger.info(f"Calculating calculated column values started")
            df_calculated = CalculationCalculatedColumns().calculations(df_manual_dcs=self.df_manual_dcs,
                                                                        df_calculated=df_calculated)
            logger.info(f"The shape of dataframe = {df_calculated.shape}")
            # Normalise every column to 4-decimal floats for the report.
            for col in df_calculated.columns:
                df_calculated[col] = df_calculated[col].astype(float).round(4)
            df_calculated_ebpr = df_calculated.copy()
            return df_calculated_ebpr, df_calculated, unavailable_tags_cal
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
\ No newline at end of file
import pandas as pd
from loguru import logger
class ConcatenateManualCalculatedDf:
    """
    Joins the manual/DCS report frame with the calculated-columns frame and
    normalises the Date column for output.
    """

    def __init__(self, df_manual_dcs_ebpr, df_calculated_ebpr, start_date):
        self.df_manual_dcs_ebpr = df_manual_dcs_ebpr
        self.df_calculated_ebpr = df_calculated_ebpr
        self.start_date = start_date

    def concate_manual_calculated_df(self):
        """Return the combined frame, minus the look-back seed row, with
        dates rendered as 'YYYY-MM-DD' strings; None on failure."""
        try:
            manual_frame = self.df_manual_dcs_ebpr
            # This helper column is only needed for upstream calculations;
            # drop it (in place, on the caller's frame) before the join.
            manual_frame.drop(['Beta_Purification_Column_C_2409_Outlet_Flow_TZ'], axis=1, inplace=True)
            combined = pd.concat([manual_frame, self.df_calculated_ebpr], axis=1)
            # The first row belongs to the look-back day used to seed the
            # "previous value" tags; remove it when present.
            if combined['Date'][0] == self.start_date:
                combined = combined[combined['Date'] > self.start_date]
            combined['Date'] = combined['Date'].dt.strftime('%Y-%m-%d')
            return combined
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
\ No newline at end of file
This diff is collapsed.
import json
import pandas as pd
import requests
from loguru import logger
from scripts.constants.app_configuration import DB
class Kairos_query:
    """Thin client for querying tag values from KairosDB."""

    def __init__(self):
        # Base host comes from application.conf ([KAIROS_DB] uri).
        self.kairos_host = DB.KairosDb.uri
        # "https://iLens:iLensJUB$456@jubilant.ilens.io/kairos"
        self.kairos_url = "{kairos_host}/api/v1/datapoints/query".format(
            kairos_host=self.kairos_host)
        # logger.info(f"kairos URL is {self.kairos_url}")

    def kairos_query(self, start, end, tag):
        """Build the KairosDB query payload.

        :param start: window start, epoch milliseconds.
        :param end: window end, epoch milliseconds.
        :param tag: tag id (or list of tag ids) for the "c3" tag filter.
        :return: query dict grouped by tag with a 1-minute 'last' aggregator.
        """
        try:
            return {
                "metrics": [
                    {
                        "tags": {
                            "c3": tag
                        },
                        "name": "project_227__ilens.live_data.raw",
                        "group_by": [
                            {
                                "name": "tag",
                                "tags": ["c3"]
                            }
                        ],
                        "aggregators": [
                            {
                                "name": "last",
                                "sampling": {
                                    "value": "1",
                                    "unit": "minutes"
                                }
                            }
                        ]
                    }
                ],
                "plugins": [],
                "cache_time": 0,
                "time_zone": "Asia/Calcutta",
                "start_absolute": start,
                "end_absolute": end,
            }
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)

    def get_data_from_kairos(self, query, tags_dict, date):
        """POST the query and map the grouped results to {tag_name: value}.

        :param query: payload built by :meth:`kairos_query`.
        :param tags_dict: {tag_id: tag_name} used to name the output keys.
        :param date: the report date, used only for error logging.
        :return: {tag_name: rounded value or 0}; None when the request or
            parsing fails entirely.
        """
        output = {}
        try:
            response = requests.post(self.kairos_url, data=json.dumps(query))
            # Parse the body once (the original parsed it twice and left the
            # first result in an unused variable).
            grouped_output_data = response.json()["queries"][0]["results"]
            for each_grouped_data in grouped_output_data:
                value = each_grouped_data["values"]
                if each_grouped_data.get('group_by', None) is None:
                    # Ungrouped result: there is no tag id to attribute the
                    # values to, so zero-fill every expected tag and move on.
                    # (The original fell through with tag_id=None and bailed
                    # out of the whole call via KeyError.)
                    for k, v in tags_dict.items():
                        output[v] = 0
                    continue
                tag_id = each_grouped_data["group_by"][0]["group"]["c3"]
                try:
                    output[tags_dict[tag_id]] = round(value[0][1], 2)
                except Exception as e:
                    # No datapoints for this tag in the window: default to 0.
                    logger.exception(f"Exception occurred for tag = {tag_id} and date = {date}", exc_info=True)
                    output[tags_dict[tag_id]] = 0
            return output
        except Exception as e:
            logger.exception(f"Exception occurred - {e} for date = {date}", exc_info=True)
import pandas as pd
from datetime import datetime, timedelta
from loguru import logger
class EbprManualDcsDataframe:
    """
    Turns the per-day manual/DCS query results into the dataframes used by
    the EBPR report.
    """

    def __init__(self, date_output, all_manual_dcs_tags):
        # date_output: {date -> {tag_name: value}} from ManualDcsData.
        self.date_output=date_output
        # list of manual/DCS tag ids (kept for parity with callers).
        self.all_manual_dcs_tags=all_manual_dcs_tags

    def ebpr_manual_dcs_dataframe(self):
        """Build the manual/DCS dataframe (one row per day).

        :return: (df_manual_dcs, df_manual_dcs_ebpr, date_output_filtered)
            where df_manual_dcs_ebpr is a copy with 'Date' as the first
            column; None on failure.
        """
        try:
            date_output_filtered = self.date_output
            # date_output_filtered = {k: v for k, v in date_output_filtered.items() if len(v) == len(self.all_manual_dcs_tags)}
            # Column order comes from the first day's result dict; all days
            # are assumed to share the same keys (validated upstream by
            # UnavailableManualDcsTags).
            columns_tags = list(list(date_output_filtered.values())[0].keys())
            value_list = [list(dic.values()) for dic in list(date_output_filtered.values())]
            df_manual_dcs = pd.DataFrame(columns=columns_tags, data=value_list)
            # df_manual_dcs = df_manual_dcs.round(decimals=3)
            # One row per queried day.  (The original also built an unused
            # contiguous date range; removed.)
            df_manual_dcs["Date"] = list(date_output_filtered.keys())
            # df_manual_dcs.drop(['Beta_Purification_Column_C_2409_Outlet_Flow_TZ'], axis=1, inplace=True)
            df_manual_dcs_ebpr = df_manual_dcs.copy()
            # Move 'Date' to the first position for the report layout.
            first_column = df_manual_dcs_ebpr.pop('Date')
            df_manual_dcs_ebpr.insert(0, 'Date', first_column)
            logger.debug(f"Dataframe shape of Manual and dcs tags = {df_manual_dcs.shape}")
            logger.info(f"No of rows containing NaN values in manual and dcs data = "
                        f"{df_manual_dcs[df_manual_dcs.isnull().any(axis=1)].shape[0]}")
            null_col_manual_list = [col for col in df_manual_dcs.columns if df_manual_dcs[col].isnull().any()]
            logger.info(f"Columns containing NaN values in manual and dcs data = {null_col_manual_list}")
            return df_manual_dcs, df_manual_dcs_ebpr, date_output_filtered
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
This diff is collapsed.
from scripts.utils.yield_sheet_3cp_utils.all_tags_3cp import TagsDict
import pandas as pd
from datetime import datetime, timedelta
from loguru import logger
from scripts.utils.yield_sheet_3cp_utils.data_puller_3cp import Kairos_query
class ManualDcsData:
    """
    Pulls manual and DCS tag values from KairosDB for every day in the
    report window.
    """
    def __init__(self, start_date, end_date, tags_cal, tags_cal_prev, tags_manual, tags_dcs):
        # start_date/end_date: inclusive window (start already includes the
        # look-back day added by ReportGenerator).
        self.start_date = start_date
        self.end_date = end_date
        self.tags_cal = tags_cal
        self.tags_cal_prev = tags_cal_prev
        self.tags_manual = tags_manual
        self.tags_dcs = tags_dcs

    def manual_dcs_dataframe(self):
        """Return {date: {tag_name: value} or None} for each day in the window.

        A day whose query fails is simply absent from the result (the
        exception is logged and the loop continues).
        """
        try:
            all_manual_dcs_tags_dict, all_cal_tags_dict = TagsDict().all_tags(self.tags_cal, self.tags_cal_prev,
                                                                              self.tags_manual, self.tags_dcs)
            all_manual_dcs_tags = list(all_manual_dcs_tags_dict.values())
            # logger.info(f"No of manual and dcs tags = {len(all_manual_dcs_tags)}")
            # All calculated tags combined
            all_calculated_tags = list(all_cal_tags_dict.values())
            # logger.info(f"No of all calculated tags = {len(all_calculated_tags)}")
            # NOTE(review): all_tags / all_tags_dict are computed but unused.
            all_tags = [*all_calculated_tags, *all_manual_dcs_tags]
            all_tags_dict = {**all_cal_tags_dict, **all_manual_dcs_tags_dict}
            # All dates for which we are going to do calculations
            all_dates = [self.start_date + timedelta(days=x) for x in range((self.end_date - self.start_date).days + 1)]
            # all_dates.insert(0, self.start_date)
            logger.debug(f"Data required for dates : {all_dates}")
            # Pulling data for all manual and dcs dates for required dates
            date_output = {}
            for dates in all_dates:
                try:
                    periodic_start_time = dates
                    logger.info(f"Pulling Manual and DCS data for date - {periodic_start_time}")
                    current_date = periodic_start_time
                    # NOTE(review): start and end are the same instant; this
                    # presumably relies on the 'last' aggregator in the query
                    # — confirm against the Kairos data layout.
                    periodic_start_time = int(periodic_start_time.timestamp()) * 1000
                    periodic_end_time = int(dates.timestamp()) * 1000
                    query_manual_dcs = Kairos_query().kairos_query(start=periodic_start_time, end=periodic_end_time,
                                                                   tag=all_manual_dcs_tags)
                    logger.info(f"{query_manual_dcs}")
                    # Invert {name -> id} so results come back keyed by name.
                    data_manual_dcs = Kairos_query().get_data_from_kairos(query=query_manual_dcs,
                                                                          tags_dict=dict((v, k)
                                                                                         for k, v in all_manual_dcs_tags_dict.items()),
                                                                          date=current_date)
                    date_output[current_date] = data_manual_dcs
                except Exception as e:
                    # A failed day is logged and skipped; remaining days still run.
                    logger.exception(f"Exception occurred", exc_info=True)
            return date_output
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
\ No newline at end of file
This diff is collapsed.
import numpy as np
import warnings
warnings.filterwarnings("ignore")
import traceback
from loguru import logger
import pandas as pd
from datetime import datetime, timedelta
from scripts.utils.yield_sheet_3cp_utils.all_tags_3cp import TagsDict
enable_traceback = True
from scripts.utils.yield_sheet_3cp_utils.manual_dcs_dataframe import ManualDcsData
from scripts.utils.yield_sheet_3cp_utils.unavailable_manual_dcs_tags import UnavailableManualDcsTags
from scripts.utils.yield_sheet_3cp_utils.ebpr_manual_dataframe import EbprManualDcsDataframe
from scripts.utils.yield_sheet_3cp_utils.calculated_dataframe import CalculatedDataframe
from scripts.utils.yield_sheet_3cp_utils.concat_manual_calculated_dataframe import ConcatenateManualCalculatedDf
from scripts.utils.yield_sheet_3cp_utils.reorder_renaming_ebpr import ReorderRename
class ReportGenerator:
    """
    Orchestrates the 3CP yield report: pulls manual/DCS and calculated tag
    data, validates availability, combines the frames and appends the
    Total/Average summary rows.
    """
    def __init__(self, tags_cal, tags_cal_prev, tags_manual, tags_dcs, start_date, end_date):
        self.tags_cal = tags_cal
        self.tags_cal_prev = tags_cal_prev
        self.tags_manual = tags_manual
        self.tags_dcs = tags_dcs
        # One extra look-back day is pulled to seed the
        # "calculated_depends_previous_value" tags.
        self.start_date = start_date - timedelta(days=1)
        self.end_date = end_date

    def yield_report_3cp(self):
        """Return (report_dataframe, status_message); (None, message) on failure."""
        try:
            date_output = ManualDcsData(start_date=self.start_date, end_date=self.end_date, tags_cal=self.tags_cal,
                                        tags_cal_prev=self.tags_cal_prev, tags_manual=self.tags_manual,
                                        tags_dcs=self.tags_dcs).manual_dcs_dataframe()
            all_manual_dcs_tags_dict, all_cal_tags_dict = TagsDict().all_tags(self.tags_cal, self.tags_cal_prev,
                                                                              self.tags_manual, self.tags_dcs)
            all_manual_dcs_tags = list(all_manual_dcs_tags_dict.values())
            all_calculated_tags = list(all_cal_tags_dict.values())
            # NOTE(review): all_tags / all_tags_dict are computed but unused.
            all_tags = [*all_calculated_tags, *all_manual_dcs_tags]
            all_tags_dict = {**all_cal_tags_dict, **all_manual_dcs_tags_dict}
            unavailable_tags, unavailable_data_date = UnavailableManualDcsTags(date_output=date_output,
                                                                               all_manual_dcs_tags=all_manual_dcs_tags,
                                                                               all_manual_dcs_tags_dict=all_manual_dcs_tags_dict,
                                                                               first_date=self.start_date).unavailable_manual_dcs_tags()
            # Abort when whole days (other than the tolerated look-back /
            # first data day) returned no data at all.
            if len(unavailable_data_date)>0:
                logger.info(f"No data is available for - {unavailable_data_date}")
                return None, f"No data is available for - {unavailable_data_date}"
            else:
                logger.info(f"Given tags are missing for dates - {unavailable_tags}")
                # Missing tags on the look-back day are tolerated.
                unavailable_tags.pop(self.start_date, None)
                if len(unavailable_tags)>=1:
                    logger.info(f"Given tags are missing for Date - {unavailable_tags}")
                    return None, f"Given tags are missing for Date - {unavailable_tags}"
                else:
                    # Build the manual/DCS frame, the calculated frame, and
                    # join them (dropping the look-back seed row).
                    df_manual_dcs, df_manual_dcs_ebpr, date_output_filtered = EbprManualDcsDataframe(date_output=date_output,
                                                                                                    all_manual_dcs_tags=all_manual_dcs_tags).\
                        ebpr_manual_dcs_dataframe()
                    df_calculated_ebpr, df_calculated, unavailable_tags_cal = CalculatedDataframe(date_output_filtered=
                                                                                                 date_output_filtered,
                                                                                                 all_calculated_tags=all_calculated_tags,
                                                                                                 all_cal_tags_dict=all_cal_tags_dict,
                                                                                                 df_manual_dcs=df_manual_dcs).calculated_df()
                    df_ebpr = ConcatenateManualCalculatedDf(df_manual_dcs_ebpr=df_manual_dcs_ebpr,
                                                            df_calculated_ebpr=df_calculated_ebpr,
                                                            start_date=self.start_date).concate_manual_calculated_df()
                    # NOTE(review): df_ebpr_copy is never used afterwards.
                    df_ebpr_copy = df_ebpr.copy()
                    # Division artefacts become NaN so sums/means stay finite.
                    df_ebpr.replace([np.inf, -np.inf], np.nan, inplace=True)
                    # a = df_ebpr['Utility_report_Power_Norms'].unique()
                    # col_name = df_ebpr.columns[1:]
                    # for col in col_name:
                    #     df_ebpr[col] = df_ebpr[col].replace([inf], 'nan')
                    total_list = []
                    # Every column except the norm columns gets a summed
                    # 'Total' row; the norm columns get an averaged
                    # 'Average' row below.
                    for col in [col for col in list(df_ebpr.columns) if col not in ('Date',
                                                                                   'Utility_report_Power_Norms',
                                                                                   'Utility_report_Steam_Norms',
                                                                                   'Utility_report_Raffinate_Norms',
                                                                                   'Utility_report_Raffinate_Vent_Gas',
                                                                                   'Utility_report_Raw_Water_Norms',
                                                                                   'Utility_report_Actual_Ammonia_Norms',
                                                                                   'Utility_report_Actual_Beta_Norms',
                                                                                   'Utility_report_Day_DM_norm',
                                                                                   'Utility_report_Actual_Benzene_Norms')]:
                        # total_list.append(df_ebpr[col].sum())
                        df_ebpr.loc['Total', col] = df_ebpr[col].sum()
                    # df_ebpr.loc['Total'] = [i for i in range(df_ebpr.shape[1])]
                    # df_ebpr.iloc[-1, 1:] = total_list
                    # NOTE(review): chained assignment; appears to rely on
                    # positional [-1] lookup to label the newly added row —
                    # confirm on the pandas version in use.
                    df_ebpr['Date'][-1] = 'Total'
                    average_list = []
                    for col in [col for col in list(df_ebpr.columns) if col in ('Utility_report_Power_Norms',
                                                                                'Utility_report_Steam_Norms',
                                                                                'Utility_report_Raffinate_Norms',
                                                                                'Utility_report_Raffinate_Vent_Gas',
                                                                                'Utility_report_Raw_Water_Norms',
                                                                                'Utility_report_Actual_Ammonia_Norms',
                                                                                'Utility_report_Actual_Beta_Norms',
                                                                                'Utility_report_Day_DM_norm',
                                                                                'Utility_report_Actual_Benzene_Norms')]:
                        df_ebpr.loc['Average', col] = df_ebpr[col].mean()
                        # average_list.append(df_ebpr[col].mean())
                    # df_ebpr.loc['Average'] = [i for i in range(df_ebpr.shape[1])]
                    # df_ebpr.iloc[-1, 1:] = average_list
                    df_ebpr['Date'][-1] = 'Average'
                    df_ebpr = df_ebpr.round(3)
                    # Final column ordering/renaming for the report layout.
                    df_concat = ReorderRename(df_ebpr=df_ebpr).reorder_rename()
                    df_concat.replace({'inf': 'nan'}, inplace=True)
                    return df_concat, f"Report is ready"
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
            return None, f"Error - {e}"
from loguru import logger
class UnavailableManualDcsTags:
    """
    Detects report days with no data at all and days with missing
    manual/DCS tags.
    """

    def __init__(self, date_output, all_manual_dcs_tags, all_manual_dcs_tags_dict, first_date):
        # date_output: {date -> {tag: value} or None} from ManualDcsData.
        self.date_output=date_output
        self.all_manual_dcs_tags=all_manual_dcs_tags
        self.all_manual_dcs_tags_dict=all_manual_dcs_tags_dict
        # first_date is the look-back day prepended by ReportGenerator;
        # missing data there is tolerated.
        self.first_date=first_date

    def unavailable_manual_dcs_tags(self):
        """Return (unavailable_tags, unavailable_data_date).

        unavailable_data_date: {date: None} for days with no data at all
        (excluding the tolerated look-back / first data day); when non-empty
        the report is aborted and unavailable_tags is left empty.
        unavailable_tags: {date: [missing tag names]} otherwise.
        """
        try:
            unavailable_tags = {}
            unavailable_data_date = {k: v for k, v in self.date_output.items() if v is None}
            date_output_without_none = {k: v for k, v in self.date_output.items() if v is not None}
            first_data_date = list(date_output_without_none.keys())[0]
            # (typo fixed: "avaialble" -> "available")
            logger.info(f"No data is available for - {unavailable_data_date}")
            # The look-back day and the first day with data may be absent.
            unavailable_data_date.pop(self.first_date, None)
            unavailable_data_date.pop(first_data_date, None)
            if len(unavailable_data_date) > 0:
                return unavailable_tags, unavailable_data_date
            date_output = self.date_output
            # Drop the look-back day entirely when it produced no data.
            # (Simplified from the original loop over a copy of the dict;
            # note this mutates the caller's date_output, as before.)
            if date_output.get(self.first_date, False) is None:
                date_output.pop(self.first_date, None)
            # Any day with fewer values than expected is missing some tags.
            for day, day_values in date_output.items():
                if len(day_values) != len(self.all_manual_dcs_tags):
                    unavailable_tags[day] = [tags for tags in list(self.all_manual_dcs_tags_dict.keys())
                                             if tags not in list(day_values.keys())]
            return unavailable_tags, unavailable_data_date
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
\ No newline at end of file
File added
File added
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment