Commit fb84d673 authored by aakash.bedi

master table create

parent 3e0db800
__pycache__/
\ No newline at end of file
stages:
  - auto-tagging
  - deploy
  - update

variables:
  MYSQL_CONNECTION: "mysql -h $MYSQL_HOST -u $MYSQL_USER -p$MYSQL_PASS"

before_script:
  - val=$($MYSQL_CONNECTION -N -e "SELECT COUNT(*) FROM $VERSION_DB.$DB_TABLE WHERE category='Server' AND type='Service' AND os='docker' AND module_name='$CI_PROJECT_NAME'")
  - if [ "$val" -eq 0 ]; then $MYSQL_CONNECTION -e "INSERT INTO $VERSION_DB.$DB_TABLE VALUES ('Server','Service','$CI_PROJECT_NAME','docker','2','0','0','0')"; fi
  - QA=$($MYSQL_CONNECTION -N -e "SELECT qa FROM $VERSION_DB.$DB_TABLE WHERE module_name='$CI_PROJECT_NAME' AND type='Service' AND category='Server' AND os='docker'")
  - DEV=$($MYSQL_CONNECTION -N -e "SELECT dev FROM $VERSION_DB.$DB_TABLE WHERE module_name='$CI_PROJECT_NAME' AND type='Service' AND category='Server' AND os='docker'")
  - UAT=$($MYSQL_CONNECTION -N -e "SELECT uat FROM $VERSION_DB.$DB_TABLE WHERE module_name='$CI_PROJECT_NAME' AND type='Service' AND category='Server' AND os='docker'")
  - PROD=$($MYSQL_CONNECTION -N -e "SELECT prod FROM $VERSION_DB.$DB_TABLE WHERE module_name='$CI_PROJECT_NAME' AND type='Service' AND category='Server' AND os='docker'")
auto-tagging:
  stage: auto-tagging
  before_script:
    - val=$($MYSQL_CONNECTION -N -e "SELECT COUNT(*) FROM $VERSION_DB.$VERSION_RELEASE_TABLE WHERE module_name='$CI_PROJECT_NAME'")
    - if [ "$val" -eq 0 ]; then $MYSQL_CONNECTION -e "INSERT INTO $VERSION_DB.$VERSION_RELEASE_TABLE VALUES ('$CI_PROJECT_NAME','iLens','5','13','0','0')"; fi
    - ILENS=$($MYSQL_CONNECTION -N -e "SELECT ilens_version FROM $VERSION_DB.$VERSION_RELEASE_TABLE WHERE module_name='$CI_PROJECT_NAME'")
    - RELEASE=$($MYSQL_CONNECTION -N -e "SELECT release_version FROM $VERSION_DB.$VERSION_RELEASE_TABLE WHERE module_name='$CI_PROJECT_NAME'")
    - FEATURE=$($MYSQL_CONNECTION -N -e "SELECT feature_version FROM $VERSION_DB.$VERSION_RELEASE_TABLE WHERE module_name='$CI_PROJECT_NAME'")
    - PATCH=$($MYSQL_CONNECTION -N -e "SELECT patch_version FROM $VERSION_DB.$VERSION_RELEASE_TABLE WHERE module_name='$CI_PROJECT_NAME'")
  script:
    # Extract the source branch from a merge commit title such as
    # "Merge branch 'QA/some-change' into 'master'".
    - SOURCE_BRANCH=$(echo $CI_COMMIT_TITLE | cut -f 3 -d " " | cut -f 1 -d "/" | cut -f 2 -d "'")
    - >
      if [ "$SOURCE_BRANCH" = "QA" ]; then
        ((RELEASE=RELEASE+1)) && FEATURE=0 && PATCH=0;
        TAG_NAME=v$ILENS.$RELEASE.$FEATURE;
        IMAGE_URL=azrilensprod.azurecr.io/ilens/release/versions/v$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME;
        PROD=$RELEASE; QA=0; DEV=0;
        $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD', qa='$QA', dev='$DEV' WHERE module_name='$CI_PROJECT_NAME' AND type='Service' AND category='Server' AND os='docker'";
      elif [ "$SOURCE_BRANCH" = "feature" ]; then
        ((FEATURE=FEATURE+1)) && PATCH=0;
        TAG_NAME=v$ILENS.$RELEASE.$FEATURE;
        IMAGE_URL=azrilensprod.azurecr.io/ilens/release/versions/v$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME;
      elif [ "$SOURCE_BRANCH" = "patch" ]; then
        ((PATCH=PATCH+1));
        TAG_NAME=v$ILENS.$RELEASE.$FEATURE.$PATCH;
        IMAGE_URL=azrilensprod.azurecr.io/ilens/release/versions/v$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME;
      else
        exit 1;
      fi
    - echo -e "\n\nImage: $IMAGE_URL" >> ReleaseNote.txt
    - sed -i "1s|^|Version: $TAG_NAME\n|" ReleaseNote.txt
    - sed -i "1s|^|Module Name: $CI_PROJECT_NAME\n|" ReleaseNote.txt
    - docker build -t $IMAGE_URL .
    - docker push $IMAGE_URL
    - docker rmi --force $IMAGE_URL
    - URL=$(echo $CI_PROJECT_URL | sed 's|https://||')
    - git remote set-url origin https://$GIT_USRNAME:$GIT_USRPASSWD@$URL
    - git config user.email "devopsilens@gmail.com"
    - git config user.name "$GIT_USRNAME"
    - git tag -a $TAG_NAME -F ReleaseNote.txt
    - git push origin $TAG_NAME
    - $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$VERSION_RELEASE_TABLE SET release_version='$RELEASE', feature_version='$FEATURE', patch_version='$PATCH' WHERE module_name='$CI_PROJECT_NAME'"
    - $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$VERSION_RELEASE_TABLE VALUES ('$CI_JOB_ID','$CI_PROJECT_NAME','iLens','$ILENS.$RELEASE.$FEATURE','$CI_COMMIT_SHA','$GITLAB_USER_NAME','$CI_COMMIT_REF_NAME')"
  tags:
    - shell
  only:
    - master
#~~~~~| QA K8 AKS |~~~~~#
qa-aks-deployment:
  stage: deploy
  script:
    - REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/qa
    - export KUBECONFIG=/home/gitlab-runner/.kube/config-aks-qa
    - NAMESPACE=ilens-core
    - QA=$(expr $QA + 1) && DEV=0
    - docker build -t $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV .
    - docker push $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
  only:
    - QA
  tags:
    - shell

tag-update-qa:
  stage: update
  script:
    - QA=$(expr $QA + 1) && DEV=0
    - REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/qa
    - docker rmi --force $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
    - $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$DB_TABLE VALUES ('$CI_JOB_ID','Server','Service','$CI_PROJECT_NAME','docker','$PROD.$QA.$DEV','$CI_COMMIT_SHA','$GITLAB_USER_NAME','$CI_COMMIT_REF_NAME')"
    - $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD', qa='$QA', dev='$DEV' WHERE module_name='$CI_PROJECT_NAME' AND type='Service' AND category='Server' AND os='docker'"
  dependencies:
    - qa-aks-deployment
  only:
    - QA
  tags:
    - shell
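The auto-tagging job above derives the next version tag from the merge source branch. As a plain-Python illustration of that bump rule (the `next_tag` helper is hypothetical and not part of this repository; the branch names and `v$ILENS.$RELEASE...` tag shapes are taken from the job above):

```python
# Hypothetical sketch of the auto-tagging bump rule in .gitlab-ci.yml above:
# a QA merge bumps the release and resets feature/patch, a feature merge
# bumps the feature and resets patch, a patch merge bumps patch and
# extends the tag with a fourth component.
def next_tag(source_branch: str, ilens: int, release: int, feature: int, patch: int) -> str:
    if source_branch == "QA":
        release, feature, patch = release + 1, 0, 0
        return f"v{ilens}.{release}.{feature}"
    if source_branch == "feature":
        feature, patch = feature + 1, 0
        return f"v{ilens}.{release}.{feature}"
    if source_branch == "patch":
        patch += 1
        return f"v{ilens}.{release}.{feature}.{patch}"
    raise ValueError(f"unrecognised source branch: {source_branch}")

# Starting from the seeded row ('5', '13', '0', '0') in the release table:
assert next_tag("QA", 5, 13, 0, 0) == "v5.14.0"
assert next_tag("feature", 5, 13, 0, 0) == "v5.13.1"
assert next_tag("patch", 5, 13, 1, 0) == "v5.13.1.1"
```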
# Default ignored files
/shelf/
/workspace.xml
# CodeStream ignored files
/../../batch_analytics\.idea/codestream.xml
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">
<option name="format" value="PLAIN" />
<option name="myDocStringFormat" value="Plain" />
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CodeStream">
<option name="webViewContext" value="{&quot;chatProviderAccess&quot;:&quot;strict&quot;,&quot;currentTeamId&quot;:&quot;&quot;,&quot;currentStreamId&quot;:&quot;&quot;,&quot;pullRequestCheckoutBranch&quot;:false,&quot;isRepositioning&quot;:false,&quot;onboardStep&quot;:0,&quot;panelStack&quot;:[&quot;landing-redirect&quot;],&quot;hasFocus&quot;:false,&quot;channelFilter&quot;:&quot;all&quot;,&quot;channelsMuteAll&quot;:false,&quot;codemarkFileFilter&quot;:&quot;all&quot;,&quot;codemarkTypeFilter&quot;:&quot;all&quot;,&quot;codemarkTagFilter&quot;:&quot;all&quot;,&quot;codemarkBranchFilter&quot;:&quot;all&quot;,&quot;codemarkAuthorFilter&quot;:&quot;all&quot;,&quot;codemarksFileViewStyle&quot;:&quot;inline&quot;,&quot;codemarksShowArchived&quot;:false,&quot;codemarksShowResolved&quot;:false,&quot;codemarksWrapComments&quot;:false,&quot;showFeedbackSmiley&quot;:true,&quot;route&quot;:{&quot;name&quot;:&quot;newUserEntry&quot;,&quot;params&quot;:{}},&quot;spatialViewShowPRComments&quot;:false,&quot;currentPullRequestNeedsRefresh&quot;:{&quot;needsRefresh&quot;:false,&quot;providerId&quot;:&quot;&quot;,&quot;pullRequestId&quot;:&quot;&quot;},&quot;__teamless__&quot;:{&quot;selectedRegion&quot;:&quot;us&quot;},&quot;sessionStart&quot;:1674199810926}" />
</component>
</project>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredPackages">
<value>
<list size="68">
<item index="0" class="java.lang.String" itemvalue="pymongo" />
<item index="1" class="java.lang.String" itemvalue="kafka-python" />
<item index="2" class="java.lang.String" itemvalue="sklearn" />
<item index="3" class="java.lang.String" itemvalue="python-dotenv" />
<item index="4" class="java.lang.String" itemvalue="mlflow" />
<item index="5" class="java.lang.String" itemvalue="scipy" />
<item index="6" class="java.lang.String" itemvalue="azure-storage-blob" />
<item index="7" class="java.lang.String" itemvalue="SQLAlchemy" />
<item index="8" class="java.lang.String" itemvalue="matplotlib" />
<item index="9" class="java.lang.String" itemvalue="PyYAML" />
<item index="10" class="java.lang.String" itemvalue="requests" />
<item index="11" class="java.lang.String" itemvalue="numpy" />
<item index="12" class="java.lang.String" itemvalue="pandas" />
<item index="13" class="java.lang.String" itemvalue="meteostat" />
<item index="14" class="java.lang.String" itemvalue="psycopg2" />
<item index="15" class="java.lang.String" itemvalue="lightgbm" />
<item index="16" class="java.lang.String" itemvalue="SVR" />
<item index="17" class="java.lang.String" itemvalue="protobuf" />
<item index="18" class="java.lang.String" itemvalue="simplejson" />
<item index="19" class="java.lang.String" itemvalue="sqlparse" />
<item index="20" class="java.lang.String" itemvalue="httpx" />
<item index="21" class="java.lang.String" itemvalue="python-dateutil" />
<item index="22" class="java.lang.String" itemvalue="psycopg2-binary" />
<item index="23" class="java.lang.String" itemvalue="boltons" />
<item index="24" class="java.lang.String" itemvalue="Jinja2" />
<item index="25" class="java.lang.String" itemvalue="redis" />
<item index="26" class="java.lang.String" itemvalue="jwt-signature-validator" />
<item index="27" class="java.lang.String" itemvalue="XlsxWriter" />
<item index="28" class="java.lang.String" itemvalue="celery" />
<item index="29" class="java.lang.String" itemvalue="pycryptodomex" />
<item index="30" class="java.lang.String" itemvalue="fdfgen" />
<item index="31" class="java.lang.String" itemvalue="shortuuid" />
<item index="32" class="java.lang.String" itemvalue="PyPDF2" />
<item index="33" class="java.lang.String" itemvalue="uvicorn" />
<item index="34" class="java.lang.String" itemvalue="pdfjinja" />
<item index="35" class="java.lang.String" itemvalue="pendulum" />
<item index="36" class="java.lang.String" itemvalue="pydantic" />
<item index="37" class="java.lang.String" itemvalue="Werkzeug" />
<item index="38" class="java.lang.String" itemvalue="aiofiles" />
<item index="39" class="java.lang.String" itemvalue="weasyprint" />
<item index="40" class="java.lang.String" itemvalue="cryptography" />
<item index="41" class="java.lang.String" itemvalue="reportlab" />
<item index="42" class="java.lang.String" itemvalue="orjson" />
<item index="43" class="java.lang.String" itemvalue="python-multipart" />
<item index="44" class="java.lang.String" itemvalue="pdfrw" />
<item index="45" class="java.lang.String" itemvalue="sqlalchemy" />
<item index="46" class="java.lang.String" itemvalue="docx" />
<item index="47" class="java.lang.String" itemvalue="openpyxl-image-loader" />
<item index="48" class="java.lang.String" itemvalue="pyyaml" />
<item index="49" class="java.lang.String" itemvalue="fastapi" />
<item index="50" class="java.lang.String" itemvalue="docxtpl" />
<item index="51" class="java.lang.String" itemvalue="pdf2image" />
<item index="52" class="java.lang.String" itemvalue="paho-mqtt" />
<item index="53" class="java.lang.String" itemvalue="aiohttp" />
<item index="54" class="java.lang.String" itemvalue="pycryptodome" />
<item index="55" class="java.lang.String" itemvalue="pytz" />
<item index="56" class="java.lang.String" itemvalue="pyjwt" />
<item index="57" class="java.lang.String" itemvalue="formio-data" />
<item index="58" class="java.lang.String" itemvalue="openpyxl" />
<item index="59" class="java.lang.String" itemvalue="ujson" />
<item index="60" class="java.lang.String" itemvalue="catboost" />
<item index="61" class="java.lang.String" itemvalue="loguru" />
<item index="62" class="java.lang.String" itemvalue="xgboost" />
<item index="63" class="java.lang.String" itemvalue="pycaret" />
<item index="64" class="java.lang.String" itemvalue="optuna" />
<item index="65" class="java.lang.String" itemvalue="prophet" />
<item index="66" class="java.lang.String" itemvalue="scikit-learn" />
<item index="67" class="java.lang.String" itemvalue="kfp" />
</list>
</value>
</option>
</inspection_tool>
<inspection_tool class="PyPep8Inspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<option name="ignoredErrors">
<list>
<option value="E122" />
</list>
</option>
</inspection_tool>
</profile>
</component>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (batch_analytics)" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/batch_analytics.iml" filepath="$PROJECT_DIR$/.idea/batch_analytics.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
FROM python:3.9-buster
COPY . /code
WORKDIR /code
RUN pip install -r requirements.txt
CMD [ "python","app.py" ]
\ No newline at end of file
# batch_analytics
# Batch Analytics
import pandas as pd
from loguru import logger
import warnings
warnings.filterwarnings('ignore')
base_path = 'data'
class BatchMaster:
def __init__(self):
self.sheet = "P2E Golden batch 05-01-2023.xlsx"
def read_file(self, sheet_name, skiprows=1):
try:
df = pd.read_excel(f'{base_path}/{self.sheet}', sheet_name=sheet_name,
skiprows=skiprows)
return df
except Exception as e:
logger.exception(f"Exception - {e}")
def read_df(self):
try:
df_stage_1 = self.read_file(sheet_name="Stage I")
df_stage_2 = self.read_file(sheet_name="Stage II")
df_stage_3 = self.read_file(sheet_name="Stage III")
df_stage_4 = self.read_file(sheet_name="Stage IV")
df_stage_5 = self.read_file(sheet_name="Stage V")
            logger.debug(f'{df_stage_1.shape}, {df_stage_2.shape}, {df_stage_3.shape}, '
                         f'{df_stage_4.shape}, {df_stage_5.shape}')
return df_stage_1, df_stage_2, df_stage_3, df_stage_4, df_stage_5
except Exception as e:
logger.exception(f"Exception - {e}")
@staticmethod
def preprocess_master_df(df, process_stage_name, batch_product, ideal_batch_cycle_time_hr, batch_setup_time_hr,
process_stage_id, work_order_no="P2E-STD-01", work_order_item_no="P2E-STD-01-01",
final_product="P2E"):
try:
df.rename(columns={'Batch Number': 'batch_no', 'Start Time.1': 'batch_start_time',
'End Time.1': 'batch_end_time', 'Downtime': 'downtime',
'Equipment': 'used_equipment'}, inplace=True)
df_stage = pd.DataFrame(index=[i for i in range(len(df))])
if process_stage_name == "Stage-01":
df_stage['formaldehyde_content'] = df['F00041-BULK-001 content nil']
df_stage['unreactive_alpha_picoline'] = df['un reacted F00056-BULK-001 ( for infor.)']
df_stage['moisture_content'] = df['Moisture content \n(for infor)']
df_stage['input_qty'] = df['Input Qty. (kg) F00056-BULK-001']
elif process_stage_name == "Stage-02":
df_stage['formaldehyde_content'] = None
df_stage['input_qty'] = df['Input Qty.\n(kg) Stage-I']
df_stage['unreactive_alpha_picoline'] = df['F00056-BULK-001 \n(NLT 50 %)']
df_stage['moisture_content'] = df['Moisture content \n(for Information).1']
elif process_stage_name == "Stage-03":
df_stage['formaldehyde_content'] = None
df_stage['input_qty'] = df['Input Qty. (kg) Stage-I']
df_stage['moisture_content'] = df['M/C for infor']
df_stage['unreactive_alpha_picoline'] = None
elif process_stage_name == "Stage-04":
df_stage['formaldehyde_content'] = None
df_stage['input_qty'] = None
df_stage['moisture_content'] = df['NMT 0.5%']
df_stage['unreactive_alpha_picoline'] = None
elif process_stage_name == "Stage-05":
df_stage['formaldehyde_content'] = None
df_stage['input_qty'] = None
df_stage['moisture_content'] = df[' NMT 1.0%']
df_stage['unreactive_alpha_picoline'] = None
df_stage['batch_no'] = df['batch_no']
df_stage['process_stage_name'] = process_stage_name
df_stage['process_stage_id'] = process_stage_id
df_stage['work_order_no'] = work_order_no
df_stage['work_order_item_no'] = work_order_item_no
df_stage['final_product'] = final_product
df_stage['batch_product'] = batch_product
df_stage['batch_start_time'] = df['batch_start_time']
df_stage['batch_end_time'] = df['batch_end_time']
df_stage['downtime'] = pd.to_timedelta(df['downtime'], unit='hours')
df_stage['ideal_batch_cycle_time_hr'] = ideal_batch_cycle_time_hr
df_stage['batch_cycle_time'] = df_stage['batch_end_time'] - df_stage['batch_start_time'] - df_stage['downtime']
df_stage['batch_cycle_time_minutes'] = df_stage['batch_cycle_time'].dt.total_seconds() // 60
df_stage['batch_setup_time_hr'] = batch_setup_time_hr
df_stage['selected_equipments'] = df['used_equipment']
df_stage['created_on'] = df['batch_start_time'].dt.date
df_stage['created_by'] = 'Aakash'
df_stage['last_updated_on'] = df['batch_end_time'].dt.date
df_stage['last_updated_by'] = 'Aakash'
df_stage = df_stage.astype({'batch_cycle_time': str})
df_stage = df_stage[['batch_no', 'final_product', 'batch_product', 'process_stage_name', 'process_stage_id',
'work_order_no', 'work_order_item_no', 'ideal_batch_cycle_time_hr', 'batch_start_time',
'batch_end_time', 'downtime', 'batch_cycle_time', 'batch_cycle_time_minutes',
'batch_setup_time_hr', 'selected_equipments', 'input_qty', 'formaldehyde_content',
'unreactive_alpha_picoline', 'moisture_content', 'created_on', 'created_by',
'last_updated_on', 'last_updated_by']]
df_stage.reset_index(drop=True, inplace=True)
return df_stage
except Exception as e:
logger.exception(f"Exception - {e}")
@staticmethod
def preprocess_master_df_proto(df, process_stage_name, batch_product, ideal_batch_cycle_time_hr, batch_setup_time_hr,
process_stage_id, work_order_no="P2E-STD-01", work_order_item_no="P2E-STD-01-01",
final_product="P2E-Stage-05"):
try:
            df = df[['Batch Number', 'Start Time.1', 'End Time.1', 'Downtime', 'Equipment']].copy()
df.rename(columns={'Batch Number': 'batch_no', 'Start Time.1': 'batch_start_time',
'End Time.1': 'batch_end_time', 'Downtime': 'downtime',
'Equipment': 'used_equipment'}, inplace=True)
df['process_stage_name'] = process_stage_name
df['process_stage_id'] = process_stage_id
df['work_order_no'] = work_order_no
df['work_order_item_no'] = work_order_item_no
df['final_product'] = final_product
df['batch_product'] = batch_product
df['ideal_batch_cycle_time_hr'] = ideal_batch_cycle_time_hr
df['batch_cycle_time'] = df['batch_end_time'] - df['batch_start_time']
df['batch_cycle_time_minutes'] = df['batch_cycle_time'].dt.total_seconds() // 60
df['batch_setup_time_hr'] = batch_setup_time_hr
df['selected_equipments'] = df['used_equipment']
df['created_on'] = df['batch_start_time'].dt.date
df['created_by'] = 'Aakash'
df['last_updated_on'] = df['batch_end_time'].dt.date
df['last_updated_by'] = 'Aakash'
df = df.astype({'batch_cycle_time': str})
df = df[['batch_no', 'final_product', 'batch_product', 'process_stage_name', 'process_stage_id',
'work_order_no', 'work_order_item_no', 'ideal_batch_cycle_time_hr', 'batch_start_time',
'batch_end_time', 'batch_cycle_time', 'batch_cycle_time_minutes', 'batch_setup_time_hr',
'downtime', 'selected_equipments', 'created_on', 'created_by', 'last_updated_on',
'last_updated_by']]
df.reset_index(drop=True, inplace=True)
return df
except Exception as e:
logger.exception(f"Exception - {e}")
@staticmethod
def preprocess_kpi_df(df, kpi, process_stage_name, process_stage_id):
try:
if kpi == "Quality":
                df = df[['Batch Number', 'Start Time.1', 'End Time.1', 'Quality']].copy()
df.rename(columns={'Batch Number': 'batch_no', 'Start Time.1': 'batch_start_time',
'End Time.1': 'batch_end_time', 'Quality': 'kpi_value'}, inplace=True)
df['process_stage_name'] = process_stage_name
df['process_stage_id'] = process_stage_id
df['kpi_name'] = kpi
df['kpi_description'] = f'{kpi} of this batch'
return df
except Exception as e:
logger.exception(f"Exception - {e}")
def orchestrator_kpi(self):
try:
df_stage_1, df_stage_2, df_stage_3, df_stage_4, df_stage_5 = self.read_df()
df_stage_1 = self.preprocess_kpi_df(df_stage_1, kpi="Quality", process_stage_name="Stage-01",
process_stage_id="P2E-STAGE-001")
df_stage_2 = self.preprocess_kpi_df(df_stage_2, kpi="Quality", process_stage_name="Stage-02",
process_stage_id="P2E-STAGE-002")
df_stage_3 = self.preprocess_kpi_df(df_stage_3, kpi="Quality", process_stage_name="Stage-03",
process_stage_id="P2E-STAGE-003")
df_stage_4 = self.preprocess_kpi_df(df_stage_4, kpi="Quality", process_stage_name="Stage-04",
process_stage_id="P2E-STAGE-004")
df_stage_5 = self.preprocess_kpi_df(df_stage_5, kpi="Quality", process_stage_name="Stage-05",
process_stage_id="P2E-STAGE-005")
df = pd.concat([df_stage_1, df_stage_2, df_stage_3, df_stage_4, df_stage_5], axis=0)
# print(df.columns)
df = df[['batch_no', 'process_stage_name', 'process_stage_id',
'batch_start_time', 'batch_end_time', 'kpi_name',
'kpi_description', 'kpi_value']]
df.reset_index(drop=True, inplace=True)
df.to_excel(f"{base_path}/batch_kpi_master.xlsx", index=False)
logger.info(f'Pushing batch_kpi_master to postgres')
df.set_index('batch_no').to_sql("batch_kpi_master",
"postgresql://ilens:iLens$456@192.168.0.207:5328/ilens_ai",
if_exists="replace")
logger.debug(f'Pushed batch_kpi_master to postgres')
return df
except Exception as e:
logger.exception(f"Exception - {e}")
def orchestrator_master(self):
try:
df_stage_1, df_stage_2, df_stage_3, df_stage_4, df_stage_5 = self.read_df()
df_stage_1 = self.preprocess_master_df(df_stage_1, process_stage_name="Stage-01",
process_stage_id="P2E-STAGE-001",
batch_product="P2E-Stage-01", ideal_batch_cycle_time_hr=12,
batch_setup_time_hr=1)
df_stage_2 = self.preprocess_master_df(df_stage_2, process_stage_name="Stage-02",
process_stage_id="P2E-STAGE-002",
batch_product="P2E-Stage-02", ideal_batch_cycle_time_hr=24,
batch_setup_time_hr=1)
df_stage_3 = self.preprocess_master_df(df_stage_3, process_stage_name="Stage-03",
process_stage_id="P2E-STAGE-003",
batch_product="P2E-Stage-03", ideal_batch_cycle_time_hr=48,
batch_setup_time_hr=1)
df_stage_4 = self.preprocess_master_df(df_stage_4, process_stage_name="Stage-04",
process_stage_id="P2E-STAGE-004",
batch_product="P2E-Stage-04", ideal_batch_cycle_time_hr=124,
batch_setup_time_hr=1)
df_stage_5 = self.preprocess_master_df(df_stage_5, process_stage_name="Stage-05",
process_stage_id="P2E-STAGE-005",
batch_product="P2E-Stage-05", ideal_batch_cycle_time_hr=None,
batch_setup_time_hr=1)
df = pd.concat([df_stage_1, df_stage_2, df_stage_3, df_stage_4, df_stage_5], axis=0)
df.reset_index(drop=True, inplace=True)
df.to_excel(f"{base_path}/batch_master.xlsx", index=False)
logger.info(f'Pushing batch_master to postgres')
df.set_index('batch_no').to_sql("batch_master",
"postgresql://ilens:iLens$456@192.168.0.207:5328/ilens_ai",
if_exists="replace")
logger.debug(f'Pushed batch_master to postgres')
return df
except Exception as e:
logger.exception(f"Exception - {e}")
@staticmethod
def join_df(data_master, data_kpi):
try:
df = pd.merge(left=data_master, right=data_kpi[['batch_no', 'kpi_name', 'kpi_description',
'kpi_value']], how='left', on='batch_no')
df.reset_index(drop=True, inplace=True)
df.to_excel(f"{base_path}/master_join.xlsx", index=False)
logger.info(f'Pushing master_join to postgres')
df.set_index('batch_no').to_sql("master_join", "postgresql://ilens:iLens$456@192.168.0.207:5328/ilens_ai", if_exists='replace')
logger.debug(f'Pushed master_join to postgres')
except Exception as e:
logger.exception(f"Exception - {e}")
def orchestrator_master_proto(self):
try:
df_stage_1, df_stage_2, df_stage_3, df_stage_4, df_stage_5 = self.read_df()
df_stage_1 = self.preprocess_master_df(df_stage_1, process_stage_name="Stage-01",
process_stage_id="P2E-STAGE-001",
batch_product="P2E-Stage-01", ideal_batch_cycle_time_hr=12,
batch_setup_time_hr=1)
df_stage_2 = self.preprocess_master_df(df_stage_2, process_stage_name="Stage-02",
process_stage_id="P2E-STAGE-002",
batch_product="P2E-Stage-02", ideal_batch_cycle_time_hr=24,
batch_setup_time_hr=1)
df_stage_3 = self.preprocess_master_df(df_stage_3, process_stage_name="Stage-03",
process_stage_id="P2E-STAGE-003",
batch_product="P2E-Stage-03", ideal_batch_cycle_time_hr=48,
batch_setup_time_hr=1)
df_stage_4 = self.preprocess_master_df(df_stage_4, process_stage_name="Stage-04",
process_stage_id="P2E-STAGE-004",
batch_product="P2E-Stage-04", ideal_batch_cycle_time_hr=124,
batch_setup_time_hr=1)
df_stage_5 = self.preprocess_master_df(df_stage_5, process_stage_name="Stage-05",
process_stage_id="P2E-STAGE-005",
batch_product="P2E-Stage-05", ideal_batch_cycle_time_hr=None,
batch_setup_time_hr=1)
df = pd.concat([df_stage_1, df_stage_2, df_stage_3, df_stage_4, df_stage_5], axis=0)
df.reset_index(drop=True, inplace=True)
df.to_excel(f"{base_path}/batch_master_proto.xlsx", index=False)
            logger.info('Pushing batch_master_proto to postgres')
df.set_index('batch_no').to_sql("batch_master_proto",
"postgresql://ilens:iLens$456@192.168.0.207:5328/ilens_ai",
if_exists="replace")
            logger.debug('Pushed batch_master_proto to postgres')
return df
except Exception as e:
logger.exception(f"Exception - {e}")
if __name__ == "__main__":
    batch_master = BatchMaster()
    df_master = batch_master.orchestrator_master_proto()
    df_kpi = batch_master.orchestrator_kpi()
    batch_master.join_df(data_master=df_master, data_kpi=df_kpi)
    logger.info(f'{df_master.shape}')
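A side note on the `to_sql` calls above: pandas accepts a raw connection string, but each call then builds a fresh engine. A minimal sketch of reusing one SQLAlchemy engine instead (the URI is a placeholder and `push` is a hypothetical helper, not part of this script):

```python
# Sketch: one shared SQLAlchemy engine for the to_sql() pushes above.
# The connection URI below is a placeholder; in practice it should come
# from configuration rather than being hard-coded as in the script above.
import pandas as pd
from sqlalchemy import create_engine

engine = create_engine("postgresql://user:password@host:5432/ilens_ai")

def push(df: pd.DataFrame, table: str) -> None:
    # Mirrors the pattern used by orchestrator_master/orchestrator_kpi.
    df.set_index('batch_no').to_sql(table, engine, if_exists="replace")
```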
[KAIROS_DB]
uri=$KAIROS_URI
[KAFKA]
kafka_host=$KAFKA_HOST
kafka_port=$KAFKA_PORT
kafka_topic=$KAFKA_TOPIC
[TIMEZONE]
default_tz=$DEFAULT_TZ
required_tz=$REQUIRED_TZ
[HISTORICAL_DATA]
start_relative=$START_RELATIVE
end_relative=$END_RELATIVE
[FORECAST_DATA]
days_of_degradation_prediction=$DAYS_OF_DEGRADATION_PREDICTION
plants:
plant_kadappa: ./conf/plant_info/kadapa_plant.yml
# plant_ariyalur: ./conf/plant_info/ariyalur_plant.yml
# plant_lanka: ./conf/plant_info/lanka_plant.yml
# plant_dalmiapuram: ./conf/plant_info/dalmiapuram_plant.yml
# plant_bcw : ./conf/plant_info/bcw_plant.yml
KAIROS_URI=https://iLens:iLensDAL$456@dalmia.ilens.io/kairos/
KAFKA_HOST=192.168.0.220
KAFKA_PORT=9092
KAFKA_TOPIC=ilens_dev
DEFAULT_TZ="UTC"
REQUIRED_TZ="Asia/Kolkata"
START_RELATIVE=90
END_RELATIVE=0
DAYS_OF_DEGRADATION_PREDICTION=7
APP_NAME=ebpr_report_engine
MONGO_URI=mongodb://192.168.0.220:2717/
KAIROS_URI=http://192.168.0.220:8080
MQTT_URL=192.168.0.220
MQTT_PORT=1883
SCHEDULER_PROXY=http://192.168.0.220/ilens-scheduler
ILENS_META_SERVICE_URL=http://192.168.0.220/ilens_api
EBPR_DATA_ENGINE_PROXY=http://192.168.0.220/formde
EBPR_PROXY=http://192.168.0.220/ebpr
WORKFLOW_MGMT_PROXY=http://jubilant.ilens.io/workflow-mt
REDIS_HOST=192.168.0.220
REDIS_PORT=6379
REDIS_BROKER_URI_CELERY=redis://192.168.0.220:6379/10
SECURITY_IP_CHECK=false
SECURITY_USER_CHECK=true
SECURITY_AGENT_CHECK=true
LOG_LEVEL=INFO
LOG_TRACEBACK=true
REPORT_DIRECTORY=reports
BASE_DIRECTORY=data
EBPR_REPORT_PROXY=http://192.168.0.220/ebpr_reports
ENABLE_CELERY_WORKER=true
HUB_NAME=
CONNECTION_STRING=
API_VERSION=
MOBILE_PUSH_NOTIFICATION=false
KAFKA_HOST=192.168.0.220
KAFKA_PORT=9092
KAFKA_TOPIC=ilens_dev
START_DATE=1627776020
END_DATE=1628035220
\ No newline at end of file
pytz==2021.3
loguru==0.5.3
scipy==1.7.1
numpy==1.21.2
pandas==1.3.3
simplejson==3.17.5
requests==2.26.0
pydantic==1.8.2
python-dotenv==0.19.2
PyYAML==6.0
kafka-python==1.4.7
SQLAlchemy==1.3.20
sqlparse==0.4.2
psycopg2==2.9.1
protobuf==3.20.*
matplotlib==3.6.1
from dotenv import load_dotenv
import os
import sys
from configparser import ConfigParser, BasicInterpolation
import yaml
from loguru import logger
# Configuration File Constants
_application_conf = "./conf/application.conf"
_default_conf = "./config.env"
data_conf = "./conf/data.yml"
training_data = "./data/training.csv"
load_dotenv(dotenv_path=_default_conf)
class EnvInterpolation(BasicInterpolation):
"""
Interpolation which expands environment variables in values.
"""
def before_get(self, parser, section, option, value, defaults):
value = super().before_get(parser, section, option, value, defaults)
if not os.path.expandvars(value).startswith("$"):
return os.path.expandvars(value)
else:
return
try:
config = ConfigParser(interpolation=EnvInterpolation())
config.read(_application_conf)
except Exception as e:
print(f"Error while loading the config: {e}")
print("Failed to Load Configuration. Exiting!!!")
sys.exit()
class Logging:
level = config.get("LOGGING", "level", fallback="INFO")
level = level if level else "INFO"
tb_flag = config.getboolean("LOGGING", "traceback", fallback=True)
tb_flag = tb_flag if tb_flag is not None else True
class Kafka:
kairos_db_uri = config['KAIROS_DB']['uri']
kafka_host = config["KAFKA"]["kafka_host"]
kafka_port = config["KAFKA"]["kafka_port"]
kafka_topic = config["KAFKA"]["kafka_topic"]
class HistoricalData:
start_relative = config['HISTORICAL_DATA']['start_relative']
end_relative = config['HISTORICAL_DATA']['end_relative']
class ForecastData:
days_of_degradation_prediction = config['FORECAST_DATA']['days_of_degradation_prediction']
class TimeZone:
default_tz = config["TIMEZONE"]["default_tz"]
required_tz = config["TIMEZONE"]["required_tz"]
# Read the configuration file
yml_file_path = "conf/"
try:
with open(os.path.join(yml_file_path, 'data.yml'), 'r') as engine_yml_file:
config_engine = yaml.full_load(engine_yml_file)
except Exception as e:
logger.exception(f"Exception occurred - {e}", exc_info=True)
plants = config_engine["plants"]
class PostgresConstants:
upper_threshold_table = "solar_maintenance_plan_upper_band"
lower_threshold_table = "solar_maintenance_plan_lower_band"
threshold_table = "solar_maintenance_plan"
depletion_table = "solar_degradation_master"
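For clarity, a small demo of how the `EnvInterpolation` class defined above resolves the `$VARS` written in `application.conf` (this sketch sets one variable inline instead of loading `config.env`; variables left unset resolve to `None` rather than the raw `$TOKEN`):

```python
# Sketch: EnvInterpolation (defined above) expands environment variables
# that are set, and maps values still starting with "$" to None.
import os
from configparser import ConfigParser

os.environ["KAFKA_HOST"] = "192.168.0.220"  # normally loaded from config.env

demo = ConfigParser(interpolation=EnvInterpolation())
demo.read_string("[KAFKA]\nkafka_host=$KAFKA_HOST\nkafka_port=$KAFKA_PORT\n")
print(demo["KAFKA"]["kafka_host"])  # -> 192.168.0.220
print(demo["KAFKA"]["kafka_port"])  # -> None (KAFKA_PORT not set in this demo)
```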
from typing import Any, Dict
from pydantic import BaseModel
class AzureDataModel(BaseModel):
blob_name: str
data: Any
class KafkaDataModel(BaseModel):
data: Dict[str, Any]
site_id: str
gw_id: str = ''
pd_id: str = ''
timestamp: int
msg_id: int = 0
partition: str = ''
retain_flag: bool = False
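A quick usage sketch for the models above (the field values are illustrative only; pydantic is pinned to 1.8.2 in requirements.txt, hence the v1-style `.json()` call):

```python
# Sketch: building a Kafka payload with the KafkaDataModel defined above.
msg = KafkaDataModel(
    data={"temperature": 42.7},  # illustrative sensor reading
    site_id="site_001",          # illustrative identifiers
    gw_id="gw_01",
    timestamp=1674199810,        # epoch seconds
)
print(msg.json())
```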