Commit f3d4565b authored by aakash.bedi

modifications

parent 6bc89d6c
@@ -9,7 +9,34 @@ from datetime import datetime
import pytz
from loguru import logger
from scripts.utils.yield_sheet_3cp_utils.all_tags_3cp import AllTags
-from scripts.utils.yield_sheet_3cp_utils.report_generator_3cp import ReportGenerator
+from scripts.utils.yield_sheet_3cp_utils.report_generator_3cp import ReportGenerator, get_dpr_report_format, \
+    get_dpr
def form_excel_multiindex(df, df_format):
# ebpr_dir = os.path.join(FilePath.report_directory, "ebpr")
# if not os.path.exists(ebpr_dir):
# os.mkdir(ebpr_dir)
#
# output_dir = os.path.join(ebpr_dir, "yield")
# if not os.path.exists(output_dir):
# os.mkdir(output_dir)
master_output_file = 'test_prod.xlsx'
master_output_file = get_dpr(master_output_file=master_output_file, df=df, df_format=df_format)
logger.info("XLSX is getting stitched")
# logger.info(f"{os.listdir(output_dir)}")
if os.path.isfile(master_output_file):
return master_output_file, ""
else:
return None, "File not created"
# __kwargs__.update(
#     start_date=input_data.property.get(
#         "start_date",
@@ -37,92 +64,23 @@ all_tags = config_engine["tag_heirarcy"]
tags_cal, tags_cal_prev, tags_manual, tags_dcs = AllTags().get_tags(all_tags_dictionary=all_tags)
# start_date = datetime.strptime(start_date, '%Y-%m-%d')
# start_date = start_date.astimezone(pytz.UTC)
-start_date = datetime.strptime("2022-12-25", '%Y-%m-%d').replace(hour=5, minute=0, second=0,
-                                                                 microsecond=0)
+start_date = datetime.strptime("2023-02-05", '%Y-%m-%d').replace(hour=5, minute=0, second=0,
+                                                                 microsecond=0)
start_date = start_date.astimezone(pytz.timezone("Asia/Kolkata")).replace(hour=5, minute=0, second=0,
                                                                          microsecond=0)
-# end_date = datetime.strptime(end_date, '%Y-%m-%d')
-# end_date = end_date.astimezone(pytz.UTC)
-end_date = datetime.strptime("2022-12-26", '%Y-%m-%d').replace(hour=5, minute=0, second=0,
-                                                               microsecond=0)
+end_date = datetime.strptime("2023-03-20", '%Y-%m-%d').replace(hour=5, minute=0, second=0,
+                                                               microsecond=0)
end_date = end_date.astimezone(pytz.timezone("Asia/Kolkata")).replace(hour=5, minute=0, second=0,
                                                                      microsecond=0)
-df, message = ReportGenerator(tags_cal=tags_cal, tags_cal_prev=tags_cal_prev, tags_manual=tags_manual,
-                              tags_dcs=tags_dcs, start_date=start_date,
-                              end_date=end_date).yield_report_3cp()
+df_format, df, message = ReportGenerator(tags_cal=tags_cal, tags_cal_prev=tags_cal_prev, tags_manual=tags_manual,
+                                         tags_dcs=tags_dcs, start_date=start_date,
+                                         end_date=end_date).yield_report_3cp()
logger.debug(f'{df.shape}')
logger.debug(f'{message}')
-master_output_file = 'test_prod.xlsx'
+form_excel_multiindex(df=df, df_format=df_format)
writer = pd.ExcelWriter(master_output_file, engine='xlsxwriter')
df.to_excel(writer, sheet_name="DPR Sheet", index=True)
workbook = writer.book
format = workbook.add_format(
{'font_name': 'Trebuchet MS', 'text_wrap': True, 'bold': 2, 'font_color': "blue"})
format.set_align('center')
format.set_align('vcenter')
format1 = workbook.add_format({'font_name': 'Trebuchet MS', 'text_wrap': True})
format1.set_align('center')
format1.set_align('vcenter')
header_footer_format = workbook.add_format({
'text_wrap': True
})
no_of_rows = df.shape[0]
worksheet = writer.sheets["DPR Sheet"]
# set the column width as per your requirement
worksheet.set_column('A:F', 15, format)
worksheet.set_column('G:L', 20, format)
worksheet.set_column('N:T', 22, format)
worksheet.set_column('U:Z', 22, format)
worksheet.set_column('AB:AD', 20, format)
worksheet.set_column('AE:AM', 22, format)
worksheet.set_column('AN:AO', 29, format)
worksheet.set_column('AQ:AU', 27, format)
worksheet.set_column('AV:BC', 33, format)
worksheet.set_column('BD:BD', 30, format)
worksheet.set_column('BE:BH', 27, format)
worksheet.set_column('AY:AY', 40, format)
worksheet.set_column('BA:BA', 35, format)
worksheet.set_column('M:M', 10, format)
worksheet.set_column('T:T', 10, format)
worksheet.set_column('AD:AD', 10, format)
worksheet.set_column('AP:AP', 10, format)
worksheet.set_column('BK:BK', 10, format)
worksheet.set_column('AA:AA', 10, format)
worksheet.set_column('AT:AT', 30, format)
worksheet.set_column('BA:BA', 25, format)
format4 = workbook.add_format({'bg_color': 'yellow'})
format5 = workbook.add_format({'text_wrap': True})
worksheet.set_row(0, 28, format5)
worksheet.conditional_format(f'A{no_of_rows + 2}:AP{no_of_rows + 2}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'AV{no_of_rows + 2}:AZ{no_of_rows + 2}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BB{no_of_rows + 2}:BD{no_of_rows + 2}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
# worksheet.conditional_format(f'BF{no_of_rows + 2}:BF{no_of_rows + 2}',
# {'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'AQ{no_of_rows + 3}:AU{no_of_rows + 3}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BA{no_of_rows + 3}:BA{no_of_rows + 3}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BE{no_of_rows + 3}:BG{no_of_rows + 3}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
writer.save()
if not master_output_file.endswith(".xlsx"):
master_output_file = master_output_file + ".xlsx"
logger.info("XLSX is getting stitched")
[KAIROS_DB]
uri = $KAIROS_URI
[LOGGING]
level = $LOG_LEVEL
traceback = $LOG_TRACEBACK
-KAIROS_URI= https://iLens:iLensJUB$456@jub-kairos.ilens.io/kairos
+KAIROS_URI=https://iLens:iLensJUB$456@jub-kairos.ilens.io/kairos
LOG_LEVEL=INFO
LOG_TRACEBACK=true
2023-03-23 15:15:23 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 15:43:36 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 16:01:48 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 17:55:59 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 17:56:03 - ERROR - [MainThread:get_excel_format():102] - Exception - Unalignable boolean Series provided as indexer (index of the boolean Series and of the indexed object do not match).
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 86, in get_excel_format
df_format = df[df_format['month'] == latest_month]
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 3449, in __getitem__
return self._getitem_bool_array(key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 3502, in _getitem_bool_array
key = check_bool_indexer(self.index, key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 2388, in check_bool_indexer
raise IndexingError(
pandas.core.indexing.IndexingError: Unalignable boolean Series provided as indexer (index of the boolean Series and of the indexed object do not match).
2023-03-23 17:56:18 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 17:56:58 - ERROR - [MainThread:get_excel_format():102] - Exception - Unalignable boolean Series provided as indexer (index of the boolean Series and of the indexed object do not match).
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 86, in get_excel_format
df_format = df[df_format['month'] == latest_month]
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 3449, in __getitem__
return self._getitem_bool_array(key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 3502, in _getitem_bool_array
key = check_bool_indexer(self.index, key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 2388, in check_bool_indexer
raise IndexingError(
pandas.core.indexing.IndexingError: Unalignable boolean Series provided as indexer (index of the boolean Series and of the indexed object do not match).
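The two tracebacks above come from building a boolean mask on one frame and applying it to another: the mask is derived from df_format but indexes df, so the row labels never line up. A minimal illustrative sketch of the failure and of the pattern the committed get_dpr_report_format later uses (filtering df_format with its own mask); the frames and values here are made up.

```python
import pandas as pd

# Two frames whose indexes differ: using one frame's boolean Series to index
# the other raises "Unalignable boolean Series provided as indexer".
df = pd.DataFrame({"value": [10, 20, 30]}, index=[0, 1, 2])
df_format = pd.DataFrame({"month": [2, 3, 3]}, index=["a", "b", "c"])

latest_month = df_format["month"].max()
# df[df_format["month"] == latest_month]          # IndexingError: indexes do not match

# Filter the frame the mask was built from (what the later revision does).
df_format = df_format[df_format["month"] == latest_month]
```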
2023-03-23 17:57:48 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 17:59:31 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 18:00:00 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 18:00:23 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 22:51:59 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 22:52:19 - ERROR - [MainThread:get_dpr_report_format():146] - Exception - 0
Traceback (most recent call last):
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexes\base.py", line 3361, in get_loc
return self._engine.get_loc(casted_key)
File "pandas\_libs\index.pyx", line 76, in pandas._libs.index.IndexEngine.get_loc
File "pandas\_libs\index.pyx", line 108, in pandas._libs.index.IndexEngine.get_loc
File "pandas\_libs\hashtable_class_helper.pxi", line 2131, in pandas._libs.hashtable.Int64HashTable.get_item
File "pandas\_libs\hashtable_class_helper.pxi", line 2140, in pandas._libs.hashtable.Int64HashTable.get_item
KeyError: 0
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 98, in get_dpr_report_format
df_format[df_format['day'] == latest_day]['Pure_Production_Day_Nia_DPR'][0],
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\series.py", line 942, in __getitem__
return self._get_value(key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\series.py", line 1051, in _get_value
loc = self.index.get_loc(label)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexes\base.py", line 3363, in get_loc
raise KeyError(key) from err
KeyError: 0
2023-03-23 22:52:49 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 22:54:52 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 22:55:08 - ERROR - [MainThread:get_dpr_report_format():146] - Exception - 0
Traceback (most recent call last):
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexes\base.py", line 3361, in get_loc
return self._engine.get_loc(casted_key)
File "pandas\_libs\index.pyx", line 76, in pandas._libs.index.IndexEngine.get_loc
File "pandas\_libs\index.pyx", line 108, in pandas._libs.index.IndexEngine.get_loc
File "pandas\_libs\hashtable_class_helper.pxi", line 2131, in pandas._libs.hashtable.Int64HashTable.get_item
File "pandas\_libs\hashtable_class_helper.pxi", line 2140, in pandas._libs.hashtable.Int64HashTable.get_item
KeyError: 0
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 98, in get_dpr_report_format
df_format[df_format['day'] == latest_day]['Pure_Production_Day_Nia_DPR'][0],
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\series.py", line 942, in __getitem__
return self._get_value(key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\series.py", line 1051, in _get_value
loc = self.index.get_loc(label)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexes\base.py", line 3363, in get_loc
raise KeyError(key) from err
KeyError: 0
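KeyError: 0 here is a label lookup, not a positional one: after filtering on day == latest_day the surviving row keeps its original index label, which is usually not 0, so series[0] fails. A small illustrative sketch with made-up data; the committed get_dpr_report_format switches to positional access via .values[0].

```python
import pandas as pd

df_format = pd.DataFrame(
    {"day": [1, 2, 3], "Pure_Production_Day_Nia_DPR": [5.0, 6.0, 7.0]},
    index=[10, 11, 12],  # labels left over from earlier filtering, no label 0
)
latest_day = df_format["day"].max()

selected = df_format[df_format["day"] == latest_day]["Pure_Production_Day_Nia_DPR"]
# selected[0]                # KeyError: 0 -- label-based lookup, label 0 does not exist
value = selected.values[0]   # positional access; selected.iloc[0] works as well
```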
2023-03-23 22:56:34 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 22:57:16 - ERROR - [MainThread:get_dpr_report_format():146] - Exception - Must have equal len keys and value when setting with an ndarray
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 142, in get_dpr_report_format
df_report_format.iloc[1:4, :10] = data_1
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 723, in __setitem__
iloc._setitem_with_indexer(indexer, value, self.name)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 1730, in _setitem_with_indexer
self._setitem_with_indexer_split_path(indexer, value, name)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 1769, in _setitem_with_indexer_split_path
self._setitem_with_indexer_2d_value(indexer, value)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 1829, in _setitem_with_indexer_2d_value
raise ValueError(
ValueError: Must have equal len keys and value when setting with an ndarray
2023-03-23 22:57:34 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:02:10 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:02:14 - ERROR - [MainThread:get_dpr_report_format():147] - Exception - Must have equal len keys and value when setting with an iterable
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 143, in get_dpr_report_format
df_report_format.iloc[index+1, :10] = data_1[index]
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 723, in __setitem__
iloc._setitem_with_indexer(indexer, value, self.name)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 1730, in _setitem_with_indexer
self._setitem_with_indexer_split_path(indexer, value, name)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 1808, in _setitem_with_indexer_split_path
raise ValueError(
ValueError: Must have equal len keys and value when setting with an iterable
2023-03-23 23:03:54 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:04:13 - ERROR - [MainThread:get_dpr_report_format():147] - Exception - could not broadcast input array from shape (4,10) into shape (0,10)
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 144, in get_dpr_report_format
df_report_format.iloc[4:, :10] = data_2
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 723, in __setitem__
iloc._setitem_with_indexer(indexer, value, self.name)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 1732, in _setitem_with_indexer
self._setitem_single_block(indexer, value, name)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 1968, in _setitem_single_block
self.obj._mgr = self.obj._mgr.setitem(indexer=indexer, value=value)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\internals\managers.py", line 355, in setitem
return self.apply("setitem", indexer=indexer, value=value)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\internals\managers.py", line 327, in apply
applied = getattr(b, f)(**kwargs)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\internals\blocks.py", line 984, in setitem
values[indexer] = value
ValueError: could not broadcast input array from shape (4,10) into shape (0,10)
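The three errors above ("equal len keys and value", "could not broadcast (4,10) into (0,10)") are all shape mismatches from block-assigning into an .iloc window of a pre-created frame whose target slice is smaller or empty. The committed get_dpr_report_format sidesteps this by building the report frame directly from the nested row lists; data_0/data_1/data_2 below are stand-ins for the real rows.

```python
import pandas as pd

# Hypothetical stand-ins for the report rows assembled in get_dpr_report_format.
data_0 = [["2023-03-20"] * 10]
data_1 = [["Production", "UOM"] + [None] * 8,
          ["Pure 3CP", "MT"] + [None] * 8]
data_2 = [["ITEM CODE", "Raw Material", "UOM"] + [None] * 7]

# Assigning into df.iloc[rows, :10] requires the target slice to already have
# exactly that shape; an empty slice gives the broadcast error seen above.
# Building the frame from the rows avoids pre-sizing it altogether.
final_lst = data_0 + data_1 + data_2
df_report_format = pd.DataFrame(final_lst)
print(df_report_format.shape)  # (4, 10)
```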
2023-03-23 23:05:30 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:05:30 - ERROR - [MainThread:yield_report_3cp():73] - Exception occurred - unknown opcode
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 148, in get_dpr_report_format
except Exception as e:
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 723, in __setitem__
iloc._setitem_with_indexer(indexer, value, self.name)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 1732, in _setitem_with_indexer
self._setitem_single_block(indexer, value, name)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexing.py", line 1968, in _setitem_single_block
self.obj._mgr = self.obj._mgr.setitem(indexer=indexer, value=value)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\internals\managers.py", line 355, in setitem
return self.apply("setitem", indexer=indexer, value=value)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\internals\managers.py", line 327, in apply
applied = getattr(b, f)(**kwargs)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\internals\blocks.py", line 984, in setitem
values[indexer] = value
ValueError: could not broadcast input array from shape (4,10) into shape (0,10)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 66, in yield_report_3cp
df_report = get_dpr_report_format(df=df)
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 149, in get_dpr_report_format
logger.exception(f'Exception - {e}')
SystemError: unknown opcode
2023-03-23 23:05:46 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:06:18 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:06:54 - ERROR - [MainThread:get_dpr_report_format():145] - Exception - 'Date'
Traceback (most recent call last):
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexes\base.py", line 3361, in get_loc
return self._engine.get_loc(casted_key)
File "pandas\_libs\index.pyx", line 76, in pandas._libs.index.IndexEngine.get_loc
File "pandas\_libs\index.pyx", line 108, in pandas._libs.index.IndexEngine.get_loc
File "pandas\_libs\hashtable_class_helper.pxi", line 5198, in pandas._libs.hashtable.PyObjectHashTable.get_item
File "pandas\_libs\hashtable_class_helper.pxi", line 5206, in pandas._libs.hashtable.PyObjectHashTable.get_item
KeyError: 'Date'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 81, in get_dpr_report_format
df_format['Date'] = pd.to_datetime(df_format['Date'])
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 3457, in __getitem__
return self._getitem_multilevel(key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 3508, in _getitem_multilevel
loc = self.columns.get_loc(key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexes\multi.py", line 2922, in get_loc
loc = self._get_level_indexer(key, level=0)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexes\multi.py", line 3204, in _get_level_indexer
idx = self._get_loc_single_level_index(level_index, key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexes\multi.py", line 2855, in _get_loc_single_level_index
return level_index.get_loc(key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexes\base.py", line 3363, in get_loc
raise KeyError(key) from err
KeyError: 'Date'
2023-03-23 23:08:33 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:10:57 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:11:25 - ERROR - [MainThread:get_dpr_report_format():147] - Exception - cannot handle a non-unique multi-index!
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 80, in get_dpr_report_format
df_format = df_format[df_format['Date'] != 'Total']
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 3457, in __getitem__
return self._getitem_multilevel(key)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 3513, in _getitem_multilevel
result = self.reindex(columns=new_columns)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\util\_decorators.py", line 324, in wrapper
return func(*args, **kwargs)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 4772, in reindex
return super().reindex(**kwargs)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\generic.py", line 4818, in reindex
return self._reindex_axes(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 4591, in _reindex_axes
frame = frame._reindex_columns(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\frame.py", line 4633, in _reindex_columns
new_columns, indexer = self.columns.reindex(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\core\indexes\multi.py", line 2524, in reindex
raise ValueError("cannot handle a non-unique multi-index!")
ValueError: cannot handle a non-unique multi-index!
2023-03-23 23:14:32 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:41:44 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:41:44 - ERROR - [MainThread:get_dpr():240] - Exception - [Errno 13] Permission denied: 'test_prod.xlsx'
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 152, in get_dpr
writer = pd.ExcelWriter(master_output_file, engine='xlsxwriter')
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\excel\_xlsxwriter.py", line 191, in __init__
super().__init__(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\excel\_base.py", line 925, in __init__
self.handles = get_handle(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\common.py", line 711, in get_handle
handle = open(handle, ioargs.mode)
PermissionError: [Errno 13] Permission denied: 'test_prod.xlsx'
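PermissionError on 'test_prod.xlsx' on Windows usually means the target workbook is still open in Excel (or held by another writer), so pandas cannot reopen it for writing. A minimal defensive sketch, assuming that is the cause; safe_excel_writer is a hypothetical helper, not part of this repo.

```python
from datetime import datetime

import pandas as pd


def safe_excel_writer(path: str) -> pd.ExcelWriter:
    """Fall back to a timestamped file name if the target workbook is locked."""
    try:
        return pd.ExcelWriter(path, engine="xlsxwriter")
    except PermissionError:
        stem, ext = path.rsplit(".", 1)
        fallback = f"{stem}_{datetime.now():%Y%m%d_%H%M%S}.{ext}"
        return pd.ExcelWriter(fallback, engine="xlsxwriter")


# writer = safe_excel_writer("test_prod.xlsx")
```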
2023-03-23 23:42:12 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:45:29 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:45:45 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-23 23:45:58 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:26:46 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:27:34 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:28:18 - ERROR - [MainThread:get_dpr():245] - Exception - 'Worksheet' object has no attribute 'add_format'
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 217, in get_dpr
cell_format = worksheet_format.add_format({'align': 'center', 'valign': 'vcenter', 'border': 1})
AttributeError: 'Worksheet' object has no attribute 'add_format'
2023-03-24 00:32:13 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:32:16 - ERROR - [MainThread:get_dpr():245] - Exception - 'Worksheet' object has no attribute 'add_format'
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 221, in get_dpr
format6 = worksheet_format.add_format({'bg_color': 'yellow', 'text_wrap': True})
AttributeError: 'Worksheet' object has no attribute 'add_format'
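In XlsxWriter, add_format lives on the Workbook, not on a Worksheet: formats are created once on the workbook and then passed into worksheet calls, which is what the later revision of get_dpr does with workbook.add_format. A short sketch (the file and sheet names are illustrative):

```python
import xlsxwriter

workbook = xlsxwriter.Workbook("demo_formats.xlsx")
worksheet = workbook.add_worksheet("Report Format")

# Formats are created on the Workbook and handed to worksheet methods.
cell_format = workbook.add_format({"align": "center", "valign": "vcenter", "border": 1})
worksheet.set_column("A:J", 15, cell_format)

workbook.close()
```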
2023-03-24 00:33:21 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:36:05 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:38:00 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:38:00 - ERROR - [MainThread:get_dpr():235] - Exception - [Errno 13] Permission denied: 'test_prod.xlsx'
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 153, in get_dpr
writer = pd.ExcelWriter(master_output_file, engine='xlsxwriter')
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\excel\_xlsxwriter.py", line 191, in __init__
super().__init__(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\excel\_base.py", line 925, in __init__
self.handles = get_handle(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\common.py", line 711, in get_handle
handle = open(handle, ioargs.mode)
PermissionError: [Errno 13] Permission denied: 'test_prod.xlsx'
2023-03-24 00:38:05 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:39:06 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:39:47 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:44:33 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:48:54 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:51:14 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 00:59:38 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:03:01 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:06:25 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:12:19 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:13:06 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:14:26 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:15:31 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:16:55 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:20:19 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:23:09 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:23:45 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:29:48 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:30:14 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:31:51 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:32:23 - INFO - [MainThread:yield_report_3cp():36] -
2023-03-24 01:40:44 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:40:44 - ERROR - [MainThread:get_dpr():254] - Exception - [Errno 13] Permission denied: 'test_prod.xlsx'
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 154, in get_dpr
writer = pd.ExcelWriter(master_output_file, engine='xlsxwriter')
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\excel\_xlsxwriter.py", line 191, in __init__
super().__init__(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\excel\_base.py", line 925, in __init__
self.handles = get_handle(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\common.py", line 711, in get_handle
handle = open(handle, ioargs.mode)
PermissionError: [Errno 13] Permission denied: 'test_prod.xlsx'
2023-03-24 01:40:53 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:41:22 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:41:22 - ERROR - [MainThread:get_dpr():254] - Exception - [Errno 13] Permission denied: 'test_prod.xlsx'
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 154, in get_dpr
writer = pd.ExcelWriter(master_output_file, engine='xlsxwriter')
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\excel\_xlsxwriter.py", line 191, in __init__
super().__init__(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\excel\_base.py", line 925, in __init__
self.handles = get_handle(
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\pandas\io\common.py", line 711, in get_handle
handle = open(handle, ioargs.mode)
PermissionError: [Errno 13] Permission denied: 'test_prod.xlsx'
2023-03-24 01:41:43 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:44:48 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:44:49 - ERROR - [MainThread:get_dpr():259] - Exception - not enough values to unpack (expected 2, got 1)
Traceback (most recent call last):
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\xlsxwriter\worksheet.py", line 139, in column_wrapper
int(args[0])
ValueError: invalid literal for int() with base 10: 'A'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 173, in get_dpr
worksheet.set_column('A', 20, _format)
File "C:\Users\aakash.bedi\Anaconda3\envs\3cp_valid_recommendations\lib\site-packages\xlsxwriter\worksheet.py", line 142, in column_wrapper
cell_1, cell_2 = [col + '1' for col in args[0].split(':')]
ValueError: not enough values to unpack (expected 2, got 1)
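worksheet.set_column accepts either a range string such as 'A:A' or numeric first/last column indices; a bare 'A' has no ':' to split, hence the unpack error above. The committed get_dpr uses 'A:A'. A quick sketch of both accepted forms (file name illustrative):

```python
import xlsxwriter

workbook = xlsxwriter.Workbook("demo_columns.xlsx")
worksheet = workbook.add_worksheet()

# worksheet.set_column('A', 25)     # ValueError: the string form must be a range
worksheet.set_column("A:A", 25)     # a single column, written as a range
worksheet.set_column(1, 5, 15)      # or numeric first/last columns (B through F)

workbook.close()
```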
2023-03-24 01:45:28 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:47:14 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:47:47 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:48:27 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:50:38 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:52:59 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:55:04 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:57:47 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:58:48 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 01:59:31 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 02:00:59 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 02:03:12 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 02:03:50 - INFO - [MainThread:yield_report_3cp():37] -
2023-03-24 02:03:50 - ERROR - [MainThread:get_dpr_report_format():149] - Exception - index 0 is out of bounds for axis 0 with size 0
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 99, in get_dpr_report_format
df_format[df_format['day'] == 1]['Pure_Production_Opening_DPR'].values[0],
IndexError: index 0 is out of bounds for axis 0 with size 0
2023-03-24 02:03:50 - ERROR - [MainThread:get_dpr():259] - Exception - cannot unpack non-iterable NoneType object
Traceback (most recent call last):
File "E:\Data\GitCodes\3cp changes\3cp_test\scripts\utils\yield_sheet_3cp_utils\report_generator_3cp.py", line 156, in get_dpr
df_report_format, last_date = get_dpr_report_format(df=df_format)
TypeError: cannot unpack non-iterable NoneType object
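The final pair of errors chain together: when the pulled date range does not include the 1st of the month, the day == 1 selection is empty and .values[0] raises IndexError; get_dpr_report_format logs it and falls through to an implicit return None, which get_dpr then fails to unpack. A small guard sketch, assuming that failure mode; first_value is a hypothetical helper, not part of the repo.

```python
import pandas as pd


def first_value(df_format: pd.DataFrame, day: int, column: str):
    """Return the first value of `column` for the given day, or None if the day is absent."""
    selected = df_format.loc[df_format["day"] == day, column]
    if selected.empty:
        # e.g. the report window does not contain the 1st of the month
        return None
    return selected.values[0]


# opening = first_value(df_format, day=1, column="Pure_Production_Opening_DPR")
```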
2023-03-24 02:04:50 - INFO - [MainThread:yield_report_3cp():37] -
@@ -43,6 +43,11 @@ class DB:
    uri = config["KAIROS_DB"]["uri"]
class Logging:
level = config.get("LOGGING", "level", fallback="INFO")
level = level or "INFO"
tb_flag = config.getboolean("LOGGING", "traceback", fallback=True)
tb_flag = tb_flag or True
...
@@ -21,7 +21,7 @@ class Kairos_query:
                "tags": {
                    "c3": tag
                },
-               "name": "project_227__ilens.live_data.raw",
+               "name": "ilens.live_data.raw",
                "group_by": [
                    {
                        "name": "tag",
@@ -32,7 +32,7 @@ class Kairos_query:
                    {
                        "name": "last",
                        "sampling": {
-                           "value": "1",
+                           "value": "2",
                            "unit": "minutes"
                        }
                    }
@@ -52,7 +52,6 @@ class Kairos_query:
        output = {}
        try:
            response = requests.post(self.kairos_url, data=json.dumps(query))
-           abb = response.json()
            grouped_output_data = response.json()["queries"][0]["results"]
            for each_grouped_data in grouped_output_data:
                value = (each_grouped_data["values"])
@@ -63,11 +62,10 @@ class Kairos_query:
                else:
                    for k, v in tags_dict.items():
                        output[v] = 0
-               # return output
                try:
                    output[tags_dict[tag_id]] = round(value[0][1], 2)
                except Exception as e:
-                   logger.exception(f"Exception occurred for tag = {tag_id} and date = {date}", exc_info=True)
+                   logger.exception(f"Exception - {e}", exc_info=True)
                    output[tags_dict[tag_id]] = 0
            return output
        except Exception as e:
...
@@ -5,53 +5,85 @@ from loguru import logger
from scripts.utils.yield_sheet_3cp_utils.data_puller_3cp import Kairos_query
-class ManualDcsData:
-    def __init__(self, start_date, end_date, tags_cal, tags_cal_prev, tags_manual, tags_dcs):
+class AllTagsDataPuller:
+    def __init__(self, start_date, end_date):
        self.start_date = start_date
        self.end_date = end_date
-        self.tags_cal = tags_cal
-        self.tags_cal_prev = tags_cal_prev
-        self.tags_manual = tags_manual
-        self.tags_dcs = tags_dcs
-    def manual_dcs_dataframe(self):
+    def get_kairos_data(self, tags_lst, tags_dict):
        try:
-            all_manual_dcs_tags_dict, all_cal_tags_dict = TagsDict().all_tags(self.tags_cal, self.tags_cal_prev,
-                                                                              self.tags_manual, self.tags_dcs)
-            all_manual_dcs_tags = list(all_manual_dcs_tags_dict.values())
-            # logger.info(f"No of manual and dcs tags = {len(all_manual_dcs_tags)}")
-            # All calculated tags combined
-            all_calculated_tags = list(all_cal_tags_dict.values())
-            # logger.info(f"No of all calculated tags = {len(all_calculated_tags)}")
-            all_tags = [*all_calculated_tags, *all_manual_dcs_tags]
-            all_tags_dict = {**all_cal_tags_dict, **all_manual_dcs_tags_dict}
-            # All dates for which we are going to do calculations
            all_dates = [self.start_date + timedelta(days=x) for x in range((self.end_date - self.start_date).days + 1)]
-            # all_dates.insert(0, self.start_date)
            logger.debug(f"Data required for dates : {all_dates}")
-            # Pulling data for all manual and dcs dates for required dates
            date_output = {}
            for dates in all_dates:
-                try:
-                    periodic_start_time = dates
-                    logger.info(f"Pulling Manual and DCS data for date - {periodic_start_time}")
-                    current_date = periodic_start_time
-                    periodic_start_time = int(periodic_start_time.timestamp()) * 1000
-                    periodic_end_time = int(dates.timestamp()) * 1000
-                    query_manual_dcs = Kairos_query().kairos_query(start=periodic_start_time, end=periodic_end_time,
-                                                                   tag=all_manual_dcs_tags)
-                    logger.info(f"{query_manual_dcs}")
-                    data_manual_dcs = Kairos_query().get_data_from_kairos(query=query_manual_dcs,
-                                                                          tags_dict=dict((v, k)
-                                                                                         for k, v in all_manual_dcs_tags_dict.items()),
-                                                                          date=current_date)
-                    date_output[current_date] = data_manual_dcs
-                except Exception as e:
-                    logger.exception(f"Exception occurred", exc_info=True)
+                periodic_start_time = dates - timedelta(minutes=0)
+                logger.info(f"Pulling data for date - {periodic_start_time}")
+                current_date = periodic_start_time
+                periodic_start_time = int(periodic_start_time.timestamp()) * 1000
+                periodic_end_time = dates + timedelta(minutes=10)
+                periodic_end_time = int(periodic_end_time.timestamp()) * 1000
+                query_manual_dcs = Kairos_query().kairos_query(start=periodic_start_time, end=periodic_end_time,
+                                                               tag=tags_lst)
+                logger.info(f"{query_manual_dcs}")
+                data_manual_dcs = Kairos_query().get_data_from_kairos(query=query_manual_dcs,
+                                                                      tags_dict=dict((v, k) for k, v in
+                                                                                     tags_dict.items()),
+                                                                      date=current_date)
+                date_output[dates] = data_manual_dcs
            return date_output
        except Exception as e:
-            logger.exception(f"Exception occurred - {e}", exc_info=True)
+            logger.exception(f'Exception - {e}')
\ No newline at end of file
def get_dataframe(date_dict, tags_lst):
try:
df = pd.DataFrame(index=[i for i in range(len(date_dict))], columns=tags_lst)
df['Date'] = list(date_dict.keys())
col = df.pop("Date")
df.insert(0, col.name, col)
for index in range(df.shape[0]):
for params in df.columns:
if params not in ['Date']:
present_date = df.iloc[index, df.columns.get_loc('Date')]
available_tags = list(date_dict[present_date].keys())
if params not in available_tags:
df.iloc[index, df.columns.get_loc(params)] = None
else:
df.iloc[index, df.columns.get_loc(params)] = date_dict[present_date][params]
df.sort_values('Date', inplace=True)
df.reset_index(drop=True, inplace=True)
df = df.astype({'Date': str,
'7302011030_Consumptions_Closing_DPR': float, 'D1D001Readings_T_2703_A_DPR': float,
'D1D001Readings_T_2703_B_DPR': float, 'D1D001Readings_Conv_DPR': float,
'D1D001_consumptions_Day_Receipt_DPR': float, '7302011030_Consumptions_Day_Receipt_DPR': float,
'7302011061_Consumption_Day_Receipt_DPR': float, '7302011061_Consumption_Day_Cons_DPR': float,
'Pure_Production_Day_Nia_DPR': float, 'Pure_Production_Day_Drum_Filling_DPR': float,
'Pure_Production_Pure_tank_Dead_Volumes_DPR': float, 'Utility_report_Day_Power': float,
'Utility_report_Day_Steam': float, 'Utility_report_Day_Raffinate': float,
'Utility_report_Vent_Gas_Raffinate': float, 'Utility_report_Day_DM': float,
'Utility_report_Day_Treated_Water': float, 'Utility_report_Raffinate_Incinerated': float,
'Beta_Purification_Column_C_2409_Outlet_Flow_TZ': float, 'D1D001Readings_T_2101_A_DPR': float,
'D1D001Readings_T_2101_B_DPR': float, 'Pure_Production_LT_2701_A_DPR': float,
'Pure_Production_LT_2701_B_DPR': float, 'D1D001_consumptions_Opening_DPR': float,
'D1D001_consumptions_Day_Cons_DPR': float, '7302011030_Consumptions_Opening_DPR': float,
'7302011030_Consumptions_Day_Cons_DPR': float, '7302011061_Consumption_Opening_DPR': float,
'Crude_Prod_Day_Prod_DPR': float, 'Pure_Production_Opening_DPR': float,
'Pure_Production_Day_Prod_DPR': float, 'Utility_report_Power_Norms': float,
'Utility_report_Steam_Norms': float, 'Utility_report_Raffinate_Norms': float,
'Utility_report_Raffinate_Vent_Gas': float, 'Utility_report_Raw_Water_Norms': float,
'Utility_report_per_hr_burn_rate': float, 'D1D001_consumptions_Closing_DPR': float,
'Utility_report_Actual_Ammonia_Norms': float, 'Utility_report_Actual_Beta_Norms': float,
'Utility_report_Actual_Benzene_Norms': float, 'Utility_report_Day_DM_norm': float,
'D1D001_consumptions_Day_Recovery_DPR': float, 'D1D001_consumptions_Total_Receipt_DPR': float,
'7302011030_Consumptions_Total_Receipt_DPR': float,
'7302011030_Consumptions_Total_Cons_DPR': float,
'7302011061_Consumption_Closing_DPR': float, '7302011061_Consumption_Total_Receipt_DPR': float,
'7302011061_Consumption_Total_Cons_DPR': float, 'Crude_Prod_Total_Prod_DPR': float,
'Pure_Production_Closing_of_Pure_Tanks_only_DPR': float,
'Pure_Production_Total_Prod_DPR': float, 'Pure_Production_Total_Nia_DPR': float,
'Pure_Production_Total_Drum_Filling_DPR': float})
return df
except Exception as e:
logger.exception(f'Exception - {e}')
\ No newline at end of file
@@ -3,8 +3,8 @@ from loguru import logger
class ReorderRename:
-    def __init__(self,df_ebpr):
-        self.df_ebpr=df_ebpr
+    def __init__(self, df_ebpr):
+        self.df_ebpr = df_ebpr
    def reorder_rename(self):
        try:
            df1 = self.df_ebpr[['Date',
...
@@ -2,18 +2,16 @@ import numpy as np
import warnings
warnings.filterwarnings("ignore")
import traceback
-from loguru import logger
+import xlsxwriter
+from scripts.logging import logger
import pandas as pd
from datetime import datetime, timedelta
from scripts.utils.yield_sheet_3cp_utils.all_tags_3cp import TagsDict
enable_traceback = True
-from scripts.utils.yield_sheet_3cp_utils.manual_dcs_dataframe import ManualDcsData
-from scripts.utils.yield_sheet_3cp_utils.unavailable_manual_dcs_tags import UnavailableManualDcsTags
-from scripts.utils.yield_sheet_3cp_utils.ebpr_manual_dataframe import EbprManualDcsDataframe
-from scripts.utils.yield_sheet_3cp_utils.calculated_dataframe import CalculatedDataframe
-from scripts.utils.yield_sheet_3cp_utils.concat_manual_calculated_dataframe import ConcatenateManualCalculatedDf
+from scripts.utils.yield_sheet_3cp_utils.manual_dcs_dataframe import AllTagsDataPuller, get_dataframe
from scripts.utils.yield_sheet_3cp_utils.reorder_renaming_ebpr import ReorderRename
class ReportGenerator:
    def __init__(self, tags_cal, tags_cal_prev, tags_manual, tags_dcs, start_date, end_date):
        self.tags_cal = tags_cal
@@ -25,89 +23,237 @@ class ReportGenerator:
    def yield_report_3cp(self):
        try:
-            date_output = ManualDcsData(start_date=self.start_date, end_date=self.end_date, tags_cal=self.tags_cal,
-                                        tags_cal_prev=self.tags_cal_prev, tags_manual=self.tags_manual,
-                                        tags_dcs=self.tags_dcs).manual_dcs_dataframe()
            all_manual_dcs_tags_dict, all_cal_tags_dict = TagsDict().all_tags(self.tags_cal, self.tags_cal_prev,
                                                                              self.tags_manual, self.tags_dcs)
-            all_manual_dcs_tags = list(all_manual_dcs_tags_dict.values())
-            all_calculated_tags = list(all_cal_tags_dict.values())
-            all_tags = [*all_calculated_tags, *all_manual_dcs_tags]
-            all_tags_dict = {**all_cal_tags_dict, **all_manual_dcs_tags_dict}
-            unavailable_tags, unavailable_data_date = UnavailableManualDcsTags(date_output=date_output,
-                                                                               all_manual_dcs_tags=all_manual_dcs_tags,
-                                                                               all_manual_dcs_tags_dict=all_manual_dcs_tags_dict,
-                                                                               first_date=self.start_date).unavailable_manual_dcs_tags()
-            if len(unavailable_data_date)>0:
-                logger.info(f"No data is available for - {unavailable_data_date}")
-                return None, f"No data is available for - {unavailable_data_date}"
-            else:
-                logger.info(f"Given tags are missing for dates - {unavailable_tags}")
-                unavailable_tags.pop(self.start_date, None)
-                if len(unavailable_tags)>=1:
-                    logger.info(f"Given tags are missing for Date - {unavailable_tags}")
-                    return None, f"Given tags are missing for Date - {unavailable_tags}"
-                else:
-                    df_manual_dcs, df_manual_dcs_ebpr, date_output_filtered = EbprManualDcsDataframe(date_output=date_output,
-                                                                                                     all_manual_dcs_tags=all_manual_dcs_tags).\
-                        ebpr_manual_dcs_dataframe()
-                    df_calculated_ebpr, df_calculated, unavailable_tags_cal = CalculatedDataframe(date_output_filtered=
-                                                                                                  date_output_filtered,
-                                                                                                  all_calculated_tags=all_calculated_tags,
-                                                                                                  all_cal_tags_dict=all_cal_tags_dict,
-                                                                                                  df_manual_dcs=df_manual_dcs).calculated_df()
-                    df_ebpr = ConcatenateManualCalculatedDf(df_manual_dcs_ebpr=df_manual_dcs_ebpr,
-                                                            df_calculated_ebpr=df_calculated_ebpr,
-                                                            start_date=self.start_date).concate_manual_calculated_df()
-                    df_ebpr_copy = df_ebpr.copy()
-                    df_ebpr.replace([np.inf, -np.inf], np.nan, inplace=True)
-                    # a = df_ebpr['Utility_report_Power_Norms'].unique()
-                    # col_name = df_ebpr.columns[1:]
-                    # for col in col_name:
-                    #     df_ebpr[col] = df_ebpr[col].replace([inf], 'nan')
-                    total_list = []
-                    for col in [col for col in list(df_ebpr.columns) if col not in ('Date',
-                                                                                    'Utility_report_Power_Norms',
-                                                                                    'Utility_report_Steam_Norms',
-                                                                                    'Utility_report_Raffinate_Norms',
-                                                                                    'Utility_report_Raffinate_Vent_Gas',
-                                                                                    'Utility_report_Raw_Water_Norms',
-                                                                                    'Utility_report_Actual_Ammonia_Norms',
-                                                                                    'Utility_report_Actual_Beta_Norms',
-                                                                                    'Utility_report_Day_DM_norm',
-                                                                                    'Utility_report_Actual_Benzene_Norms')]:
-                        # total_list.append(df_ebpr[col].sum())
-                        df_ebpr.loc['Total', col] = df_ebpr[col].sum()
-                    # df_ebpr.loc['Total'] = [i for i in range(df_ebpr.shape[1])]
-                    # df_ebpr.iloc[-1, 1:] = total_list
-                    df_ebpr['Date'][-1] = 'Total'
-                    average_list = []
-                    for col in [col for col in list(df_ebpr.columns) if col in ('Utility_report_Power_Norms',
-                                                                                'Utility_report_Steam_Norms',
-                                                                                'Utility_report_Raffinate_Norms',
-                                                                                'Utility_report_Raffinate_Vent_Gas',
-                                                                                'Utility_report_Raw_Water_Norms',
-                                                                                'Utility_report_Actual_Ammonia_Norms',
-                                                                                'Utility_report_Actual_Beta_Norms',
-                                                                                'Utility_report_Day_DM_norm',
-                                                                                'Utility_report_Actual_Benzene_Norms')]:
-                        df_ebpr.loc['Average', col] = df_ebpr[col].mean()
-                        # average_list.append(df_ebpr[col].mean())
-                    # df_ebpr.loc['Average'] = [i for i in range(df_ebpr.shape[1])]
-                    # df_ebpr.iloc[-1, 1:] = average_list
-                    df_ebpr['Date'][-1] = 'Average'
-                    df_ebpr = df_ebpr.round(3)
-                    df_concat = ReorderRename(df_ebpr=df_ebpr).reorder_rename()
-                    df_concat.replace({'inf': 'nan'}, inplace=True)
-                    return df_concat, f"Report is ready"
+            all_tags_dict = {**all_manual_dcs_tags_dict, **all_cal_tags_dict}
+            all_tags = list(all_tags_dict.values())
+            all_tags_data_puller = AllTagsDataPuller(start_date=self.start_date, end_date=self.end_date)
+            manual_dcs_input = all_tags_data_puller.get_kairos_data(tags_dict=all_tags_dict,
+                                                                    tags_lst=all_tags)
+            df = get_dataframe(date_dict=manual_dcs_input, tags_lst=list(all_tags_dict.keys()))
+            logger.info(f'')
+            for col in [col for col in list(df.columns) if col not in ('Date',
+                                                                       'Utility_report_Power_Norms',
+                                                                       'Utility_report_Steam_Norms',
+                                                                       'Utility_report_Raffinate_Norms',
+                                                                       'Utility_report_Raffinate_Vent_Gas',
+                                                                       'Utility_report_Raw_Water_Norms',
+                                                                       'Utility_report_Actual_Ammonia_Norms',
+                                                                       'Utility_report_Actual_Beta_Norms',
+                                                                       'Utility_report_Day_DM_norm',
+                                                                       'Utility_report_Actual_Benzene_Norms')]:
+                df.loc['Total', col] = df[col].sum()
+            df['Date'][-1] = 'Total'
+            average_list = []
+            for col in [col for col in list(df.columns) if col in ('Utility_report_Power_Norms',
+                                                                   'Utility_report_Steam_Norms',
+                                                                   'Utility_report_Raffinate_Norms',
+                                                                   'Utility_report_Raffinate_Vent_Gas',
+                                                                   'Utility_report_Raw_Water_Norms',
+                                                                   'Utility_report_Actual_Ammonia_Norms',
+                                                                   'Utility_report_Actual_Beta_Norms',
+                                                                   'Utility_report_Day_DM_norm',
+                                                                   'Utility_report_Actual_Benzene_Norms')]:
+                df.loc['Average', col] = df[col].mean()
+            df['Date'][-1] = 'Average'
+            df = df.round(3)
+            df_concat = ReorderRename(df_ebpr=df).reorder_rename()
+            df_concat.replace({'inf': 'nan'}, inplace=True)
+            return df, df_concat, f"Report is ready"
        except Exception as e:
            logger.exception(f"Exception occurred - {e}", exc_info=True)
            return None, f"Error - {e}"
def get_dpr_report_format(df):
try:
df_format = df.copy()
df_format = df_format.drop(['Total', 'Average'])
df_format['Date'] = pd.to_datetime(df_format['Date'])
df_format['month'] = df_format['Date'].dt.month
df_format['day'] = df_format['Date'].dt.day
last_date = df_format.iloc[-1, df_format.columns.get_loc('Date')].date()
latest_month = max(list(df_format['month'].unique()))
df_format = df_format[df_format['month'] == latest_month]
df_format.reset_index(drop=True, inplace=True)
latest_day = max(list(df_format['day'].unique()))
data_0 = [[last_date, last_date, last_date, last_date, last_date,
last_date, last_date, last_date, last_date, last_date]]
data_1 = [['Production', 'UOM', "Month's opening", "Day Production", "To date Production",
"Day Nia Transfer", "To date Nia Transfer", "Day 3CP Drum Filling Dispatch",
"To Date 3CP Drum Filling Dispatch", "Closing Stock"],
["Pure 3CP", "MT",
df_format[df_format['day'] == 1]['Pure_Production_Opening_DPR'].values[0],
df_format[df_format['day'] == latest_day]['Pure_Production_Day_Prod_DPR'].values[0],
df_format[df_format['day'] == latest_day]['Pure_Production_Total_Prod_DPR'].values[0],
df_format[df_format['day'] == latest_day]['Pure_Production_Day_Nia_DPR'].values[0],
df_format[df_format['day'] == latest_day]['Pure_Production_Total_Nia_DPR'].values[0], None, None,
df_format[df_format['day'] == latest_day]['Pure_Production_Closing_of_Pure_Tanks_only_DPR'].values[0]],
['Crude 3CP', 'MT', None,
df_format[df_format['day'] == latest_day]['Crude_Prod_Day_Prod_DPR'].values[0],
df_format[df_format['day'] == latest_day]['Crude_Prod_Total_Prod_DPR'].values[0],
None, None, None, None, None]]
data_2 = [
['ITEM CODE', 'Raw Material', 'UOM', "Month's Opening", 'Day Opening Stock', 'Day Receipt',
'To Date Receipt', 'Day Consumption', 'To Date Consumption', 'Closing Stock'],
['F00040-BULK-0001', 'Beta Picoline', 'MT',
df_format[df_format['day'] == 1]['D1D001_consumptions_Opening_DPR'].values[0],
df_format[df_format['day'] == latest_day]['D1D001_consumptions_Opening_DPR'].values[0],
df_format[df_format['day'] == latest_day]['D1D001_consumptions_Day_Receipt_DPR'].values[0],
df_format[df_format['day'] == latest_day]['D1D001_consumptions_Total_Receipt_DPR'].values[0],
df_format[df_format['day'] == latest_day]['Crude_Prod_Day_Prod_DPR'].values[0]*1.04,
df_format[df_format['day'] == 1]['D1D001_consumptions_Opening_DPR'].values[0] +
df_format[df_format['day'] == latest_day]['D1D001_consumptions_Total_Receipt_DPR'].values[0] -
df_format[df_format['day'] == latest_day]['D1D001_consumptions_Closing_DPR'].values[0],
df_format[df_format['day'] == latest_day]['D1D001_consumptions_Closing_DPR'].values[0]
],
['7302011030', 'Ammonia', 'MT',
df_format[df_format['day'] == 1]['7302011030_Consumptions_Opening_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011030_Consumptions_Opening_DPR'].values[0],
df_format[df_format['day'] == latest_day]['D1D001_consumptions_Day_Receipt_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011030_Consumptions_Total_Receipt_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011030_Consumptions_Day_Cons_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011030_Consumptions_Total_Cons_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011030_Consumptions_Closing_DPR'].values[0]
],
['7302011061', 'Benzene', 'MT',
df_format[df_format['day'] == 1]['7302011061_Consumption_Opening_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011061_Consumption_Opening_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011061_Consumption_Day_Receipt_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011061_Consumption_Total_Receipt_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011061_Consumption_Day_Cons_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011061_Consumption_Total_Cons_DPR'].values[0],
df_format[df_format['day'] == latest_day]['7302011061_Consumption_Closing_DPR'].values[0]
]
]
final_lst = data_0 + data_1 + data_2
df_report_format = pd.DataFrame(final_lst)
return df_report_format, last_date
except Exception as e:
logger.exception(f'Exception - {e}')
def get_dpr(master_output_file, df, df_format):
try:
writer = pd.ExcelWriter(master_output_file, engine='xlsxwriter')
df.to_excel(writer, sheet_name="DPR Sheet", index=True)
df_report_format, last_date = get_dpr_report_format(df=df_format)
df_report_format.to_excel(writer, sheet_name="Report Format", index=False)
workbook = writer.book
_format = workbook.add_format(
{'font_name': 'Trebuchet MS', 'text_wrap': True, 'bold': 2, 'font_color': "blue"})
_format.set_align('center')
_format.set_align('vcenter')
format1 = workbook.add_format({'font_name': 'Trebuchet MS', 'text_wrap': True})
format1.set_align('center')
format1.set_align('vcenter')
header_footer_format = workbook.add_format({
'text_wrap': True
})
no_of_rows = df.shape[0]
worksheet = writer.sheets["DPR Sheet"]
# set the column width as per your requirement
worksheet.set_column('A:A', 25, _format)
worksheet.set_column('B:F', 15, _format)
worksheet.set_column('G:L', 20, _format)
worksheet.set_column('N:T', 22, _format)
worksheet.set_column('U:Z', 22, _format)
worksheet.set_column('AB:AD', 20, _format)
worksheet.set_column('AE:AM', 22, _format)
worksheet.set_column('AN:AO', 29, _format)
worksheet.set_column('AQ:AU', 27, _format)
worksheet.set_column('AV:BC', 33, _format)
worksheet.set_column('BD:BD', 30, _format)
worksheet.set_column('BE:BH', 27, _format)
worksheet.set_column('AY:AY', 40, _format)
worksheet.set_column('BA:BA', 35, _format)
worksheet.set_column('M:M', 10, _format)
worksheet.set_column('T:T', 10, _format)
worksheet.set_column('AD:AD', 10, _format)
worksheet.set_column('AP:AP', 10, _format)
worksheet.set_column('BK:BK', 10, _format)
worksheet.set_column('AA:AA', 10, _format)
worksheet.set_column('AT:AT', 30, _format)
worksheet.set_column('BA:BA', 25, _format)
format4 = workbook.add_format({'bg_color': 'yellow'})
format5 = workbook.add_format({'text_wrap': True})
worksheet.set_row(0, 28, format5)
worksheet.conditional_format(f'A{no_of_rows + 2}:AP{no_of_rows + 2}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'AV{no_of_rows + 2}:AZ{no_of_rows + 2}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BB{no_of_rows + 2}:BD{no_of_rows + 2}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'AQ{no_of_rows + 3}:AU{no_of_rows + 3}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BA{no_of_rows + 3}:BA{no_of_rows + 3}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
worksheet.conditional_format(f'BE{no_of_rows + 3}:BG{no_of_rows + 3}',
{'type': 'cell', 'criteria': '<=', 'value': 10000000, 'format': format4})
border_fmt = workbook.add_format({'bottom': 2, 'top': 2, 'left': 2, 'right': 2})
worksheet.conditional_format(xlsxwriter.utility.xl_range(0, 0, df_format.shape[0]+4, 58),
{'type': 'no_errors', 'format': border_fmt})
_format1 = workbook.add_format(
{'font_name': 'Trebuchet MS', 'text_wrap': True, 'bold': 2, 'font_color': "black"})
_format1.set_align('center')
_format1.set_align('vcenter')
header_footer_format = workbook.add_format({
'text_wrap': True
})
no_of_rows_format = df_report_format.shape[0]
worksheet_format = writer.sheets["Report Format"]
worksheet_format.set_column('A:J', 15, _format1)
cell_format = workbook.add_format({'align': 'center', 'valign': 'vcenter', 'border': 1, 'bold': True,
'text_wrap': True})
worksheet_format.merge_range('A1:J2', str(last_date), cell_format)
cell_format_headings = workbook.add_format({'align': 'center', 'valign': 'vcenter',
'bold': True, 'text_wrap': True, 'font_color': 'black'})
worksheet_format.set_row(2, 15, cell_format_headings)
worksheet_format.set_row(5, 15, cell_format_headings)
cell_format_content = workbook.add_format({'align': 'center', 'valign': 'vcenter',
'bold': True, 'text_wrap': True, 'font_color': 'blue'})
worksheet_format.set_row(3, 13, cell_format_content)
worksheet_format.set_row(4, 13, cell_format_content)
worksheet_format.set_row(6, 13, cell_format_content)
worksheet_format.set_row(7, 13, cell_format_content)
worksheet_format.set_row(8, 13, cell_format_content)
border_fmt = workbook.add_format({'bottom': 2, 'top': 2, 'left': 2, 'right': 2})
worksheet_format.conditional_format(xlsxwriter.utility.xl_range(0, 0, 8, 9),
{'type': 'no_errors', 'format': border_fmt})
writer.save()
writer.save()
if not master_output_file.endswith('.xlsx'):
master_output_file = master_output_file + '.xlsx'
return master_output_file
except Exception as e:
logger.exception(f'Exception - {e}')