Commit 88724804 authored by dasharatha.vamshi's avatar dasharatha.vamshi

added one main

parent 5d708a91
import sys import sys
import warnings import warnings
import mlflow import mlflow
from sklearn import metrics from sklearn import metrics
from sklearn.model_selection import train_test_split from sklearn.model_selection import train_test_split
import math
import traceback
from datetime import datetime
# from scripts.constants.constants import RawConstants import numpy as np
# from scripts.core.model_loader import ModelLoader import pandas as pd
# from scripts.section_utils.bof_section import preprocess_bof_section from loguru import logger
# from scripts.section_utils.extruder_section import preprocess_extruder_section
# from scripts.section_utils.material_section import preprocess_viscosity_section
# from scripts.section_utils.mixer_section import preprocess_mixer_section
# from scripts.section_utils.pickup_section import preprocess_pickup_section
# from scripts.section_utils.sheet_supply_section import preprocess_sheet_section
warnings.filterwarnings("ignore")
import warnings
# from scripts.constants.constants import ExtruderConstants
warnings.filterwarnings("ignore")
import warnings
# from scripts.constants.constants import ViscosityConstants
warnings.filterwarnings("ignore")
import warnings
# from scripts.constants.constants import MixerConstants
warnings.filterwarnings("ignore")
import warnings
# from scripts.constants.constants import PickupConstants
warnings.filterwarnings("ignore")
import warnings
# from scripts.constants.constants import SheetConstants
warnings.filterwarnings("ignore") warnings.filterwarnings("ignore")
def preprocess_sheet_section(df, index_number): def preprocess_sheet_section(df, index_number):
sheet_supply_column = SheetConstants.sheet_supply_column sheet_supply_column = SheetConstants.sheet_supply_column
sheet_supply_df = df[sheet_supply_column] sheet_supply_df = df[sheet_supply_column]
...@@ -724,6 +690,8 @@ def preprocess_viscosity_section(viscosity_df, index_number): ...@@ -724,6 +690,8 @@ def preprocess_viscosity_section(viscosity_df, index_number):
viscosity_df['batch-date'] = 'Batch_' + viscosity_df['Batch No.'].astype(str) + '_' + viscosity_df['date'].astype( viscosity_df['batch-date'] = 'Batch_' + viscosity_df['Batch No.'].astype(str) + '_' + viscosity_df['date'].astype(
str) str)
viscosity_df = viscosity_df[viscosity_df['Index No'] == index_number] viscosity_df = viscosity_df[viscosity_df['Index No'] == index_number]
if viscosity_df.empty:
raise Exception(f"Size Index No {index_number} not found pls check the file")
rubber_cols = ViscosityConstants.rubber_cols rubber_cols = ViscosityConstants.rubber_cols
# Replace '-' with 0 for numerical and float columns # Replace '-' with 0 for numerical and float columns
viscosity_df[rubber_cols] = viscosity_df[rubber_cols].replace('-', 0) viscosity_df[rubber_cols] = viscosity_df[rubber_cols].replace('-', 0)
...@@ -1057,58 +1025,6 @@ def preprocess_extruder_section(df, index_number, vis_df): ...@@ -1057,58 +1025,6 @@ def preprocess_extruder_section(df, index_number, vis_df):
return df_extruder_grouped return df_extruder_grouped
import math
import warnings
import traceback
from datetime import datetime
import numpy as np
import pandas as pd
from loguru import logger
from scripts.constants.constants import BofConstants
warnings.filterwarnings("ignore")
def mixer_section_start_end_time(raw_df, index_no):
    """Build a per-batch start/end time lookup for the mixer section.

    Filters the raw plant data to the given size index, drops rows with a
    zero mixing batch number, then groups by (day, batch number) to find
    the first and last timestamp of each batch.

    Args:
        raw_df: raw sensor DataFrame; must contain the columns listed in
            ``BofConstants.bof_mixer_cols`` (including 'Time Stamp',
            'Size No (INDEX No).3' and 'Mixing batch number').
        index_no: size index number used to filter the rows.

    Returns:
        dict mapping 'Batch_<batch>_<date>' -> {'start_time': str,
        'end_time': str} (timestamps stringified).

    Raises:
        Exception: re-raised with the original error chained if any step fails.
    """
    try:
        mixer_cols = BofConstants.bof_mixer_cols
        # .copy() so the column assignments below do not hit a view of raw_df
        # (avoids SettingWithCopyWarning / silently lost writes).
        mixer_df = raw_df[mixer_cols].copy()
        mixer_df['Time Stamp'] = pd.to_datetime(mixer_df['Time Stamp'])
        mixer_df = mixer_df.sort_values(by='Time Stamp')
        # Normalize all numeric columns to float for consistent downstream math.
        numeric_cols = mixer_df.select_dtypes(include=['int', 'float']).columns
        mixer_df[numeric_cols] = mixer_df[numeric_cols].astype(float)
        mixer_df['day'] = mixer_df['Time Stamp'].dt.date
        mixer_df = mixer_df[mixer_df["Size No (INDEX No).3"] == index_no]
        # Batch number 0 marks idle/no-batch rows; exclude them.
        mixer_df = mixer_df[mixer_df["Mixing batch number"] != 0]
        # Named aggregation on the timestamp column replaces the original
        # pattern of duplicating 'Time Stamp' into two helper columns.
        df_mixer_grouped = (
            mixer_df.groupby(['day', 'Mixing batch number'])['Time Stamp']
            .agg(time_min='min', time_max='max')
            .reset_index()
        )
        df_mixer_grouped['batch-date'] = 'Batch_' + df_mixer_grouped['Mixing batch number'].astype(str) + '_' + \
                                         df_mixer_grouped['day'].astype(str)
        # One row per batch-date is enough: take the first occurrence,
        # matching the original behavior of reading element [0] per batch.
        date_dict = {
            row['batch-date']: {"start_time": str(row['time_min']),
                                'end_time': str(row['time_max'])}
            for _, row in df_mixer_grouped.drop_duplicates('batch-date').iterrows()
        }
        return date_dict
    except Exception as err:
        logger.error(f'Error in fetching mixer batch date dictionary: {str(err)}')
        logger.error(traceback.format_exc())
        # Chain the cause so the original exception type/traceback survive.
        raise Exception(str(err)) from err
def return_batch_no_df(raw_df, viscosity_df, date_dict, index_number): def return_batch_no_df(raw_df, viscosity_df, date_dict, index_number):
try: try:
...@@ -1988,8 +1904,8 @@ def start_prediction(raw_path, viscosity_path, index_no, raw_skip_rows, viscosit ...@@ -1988,8 +1904,8 @@ def start_prediction(raw_path, viscosity_path, index_no, raw_skip_rows, viscosit
df_grouped = merged_all_sections(df_sheet_grouped, df_mixer_grouped, df_extruder_grouped, df_bof_grouped, df_grouped = merged_all_sections(df_sheet_grouped, df_mixer_grouped, df_extruder_grouped, df_bof_grouped,
df_pickup_grouped, viscosity_df) df_pickup_grouped, viscosity_df)
load_and_predict(df_grouped, index_no, model_path) # load_and_predict(df_grouped, index_no, model_path)
# model_trainer(df_grouped, index_no, model_path) model_trainer(df_grouped, index_no, model_path)
if __name__ == "__main__": if __name__ == "__main__":
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment