Commit ec0853ab authored by vaisakh.nair

new data added

parent 0297bb70
Pipeline #53022 failed
#Ignore the logs directory
logs/
#Ignoring the password file
passwords.txt
#Ignoring git and cache folders
.git
.cache
.gitignore
.gitlab-ci.yml
variables.yml
#Ignoring all the markdown and class files
*.md
**/*.class
.env
__pycache__
*.pyc
*.pyo
*.pyd
.Python
.env
pip-log.txt
pip-delete-this-directory.txt
.tox
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.log
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
*.pyc
*.iml
*.xml
*.patch
idea/
stages:
- auto-tagging
- gmp-auto-tagging
- validate
- scan
- build
- deploy
- update
variables:
MYSQL_CONNECTION: "mysql -h $MYSQL_HOST -u $MYSQL_USER -p$MYSQL_PASS "
STATUS_SCRIPT: /home/gitlab-runner/monitor/deployment-status.sh
HELM_CHART: /home/gitlab-runner/kubernetes/ilens/$QA_ENV/ilens-modules
VARIABLES_YML: variables.yml
DEPLOYMENT_YML: form-management.yml
TIMEOUT: 960s
before_script:
- val=`echo $($MYSQL_CONNECTION -e "SELECT COUNT(*) FROM $VERSION_DB.$DB_TABLE WHERE category='Server' AND type='Service' AND os='docker' AND module_name='$CI_PROJECT_NAME' ") | cut -d " " -f2`
- if [ $val == 0 ]; then $MYSQL_CONNECTION -e "INSERT INTO $VERSION_DB.$DB_TABLE values('Server','Service','$CI_PROJECT_NAME','docker', '2', '0', '0', '0')";fi
- QA=$($MYSQL_CONNECTION -N -e "SELECT qa FROM $VERSION_DB.$DB_TABLE where module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'")
- DEV=$($MYSQL_CONNECTION -N -e "SELECT dev FROM $VERSION_DB.$DB_TABLE where module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'")
- PROD=$($MYSQL_CONNECTION -N -e "SELECT prod FROM $VERSION_DB.$DB_TABLE where module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'")
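# The before_script above seeds a per-module version row in $VERSION_DB.$DB_TABLE on first
# run and loads the current PROD/QA/DEV counters used for image tags in the jobs below
# (illustrative: prod=2, qa=5, dev=0 gives v2.6.0 once the QA deploy job bumps QA).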
auto-tagging:
stage: auto-tagging
before_script:
- val=`echo $($MYSQL_CONNECTION -e "SELECT COUNT(*) FROM $VERSION_DB.$VERSION_RELEASE_TABLE WHERE module_name='$CI_PROJECT_NAME' ") | cut -d " " -f2`
- if [ $val == 0 ]; then $MYSQL_CONNECTION -N -e "INSERT INTO $VERSION_DB.$VERSION_RELEASE_TABLE values('$CI_PROJECT_NAME', 'iLens', '5', '6', '0', '0')";fi
- ILENS=$($MYSQL_CONNECTION -N -e "SELECT ilens_version FROM "$VERSION_DB.$VERSION_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
- RELEASE=$($MYSQL_CONNECTION -N -e "SELECT release_version FROM "$VERSION_DB.$VERSION_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
- FEATURE=$($MYSQL_CONNECTION -N -e "SELECT feature_version FROM "$VERSION_DB.$VERSION_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
- PATCH=$($MYSQL_CONNECTION -N -e "SELECT patch_version FROM "$VERSION_DB.$VERSION_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
script:
- SOURCE_BRANCH=$(echo $CI_COMMIT_TITLE | cut -f 3 -d " " | cut -f 1 -d "/" | cut -f 2 -d "'")
- >
if [ "$SOURCE_BRANCH" = "QA" ]; then
((RELEASE=RELEASE+1)) && FEATURE=0 && PATCH=0;
TAG_NAME=v$ILENS.$RELEASE.$FEATURE
IMAGE_URL=azrilensprod.azurecr.io/ilens/release/versions/v"$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME"
PROD=$RELEASE; QA=0; DEV=0;
$MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD' ,qa='$QA', dev='$DEV' WHERE module_name='$CI_PROJECT_NAME' AND type='Service' AND category='Server' AND os='docker'"
elif [ $SOURCE_BRANCH == "feature" ]; then
((FEATURE=FEATURE+1)) && PATCH=0;
TAG_NAME=v$ILENS.$RELEASE.$FEATURE
IMAGE_URL=azrilensprod.azurecr.io/ilens/release/versions/v"$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME"
elif [ $SOURCE_BRANCH == "patch" ]; then
((PATCH=PATCH+1));
TAG_NAME=v$ILENS.$RELEASE.$FEATURE.$PATCH
IMAGE_URL=azrilensprod.azurecr.io/ilens/release/versions/v"$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME"
else
exit 1
fi
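# Tagging sketch (illustrative values, based on the seeded row ilens=5, release=6,
# feature=0, patch=0): a merge from a feature/* branch yields TAG_NAME=v5.6.1, a merge
# from QA yields v5.7.0, and a patch/* merge yields v5.6.0.1; any other source branch fails the job.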
- echo -e "\n\nImage:" $IMAGE_URL >> ReleaseNote.txt
- sed -i "1s|^|Version":" $TAG_NAME\n|" ReleaseNote.txt
- sed -i "1s|^|Module Name":" $CI_PROJECT_NAME\n|" ReleaseNote.txt
- docker build -t $IMAGE_URL .
- docker push $IMAGE_URL
- docker rmi --force $IMAGE_URL
- URL=$(echo $CI_PROJECT_URL | sed 's|https://||')
- git remote set-url origin https://$GIT_USRNAME:$GIT_USRPASSWD@$URL
- git config user.email "devopsilens@gmail.com"
- git config user.name "$GIT_USRNAME"
- git tag -a $TAG_NAME -F ReleaseNote.txt
- git push origin $TAG_NAME
- $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$VERSION_RELEASE_TABLE SET release_version='$RELEASE', feature_version='$FEATURE', patch_version='$PATCH' WHERE module_name = '$CI_PROJECT_NAME' "
- $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$VERSION_RELEASE_TABLE values('$CI_JOB_ID', '$CI_PROJECT_NAME','iLens', '$ILENS.$RELEASE.$FEATURE', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
tags:
- shell
only:
- master
#~~~~~| GMP auto-tagging |~~~~~#
gmp-auto-tagging:
stage: auto-tagging
before_script:
- val=`echo $($MYSQL_CONNECTION -e "SELECT COUNT(*) FROM $VERSION_DB.$GMP_RELEASE_TABLE WHERE module_name='$CI_PROJECT_NAME' ") | cut -d " " -f2`
- if [ $val == 0 ]; then $MYSQL_CONNECTION -N -e "INSERT INTO $VERSION_DB.$GMP_RELEASE_TABLE values('$CI_PROJECT_NAME', 'iLens', '6', '7', '0', '0')";fi
- ILENS=$($MYSQL_CONNECTION -N -e "SELECT ilens_version FROM "$VERSION_DB.$GMP_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
- RELEASE=$($MYSQL_CONNECTION -N -e "SELECT release_version FROM "$VERSION_DB.$GMP_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
- FEATURE=$($MYSQL_CONNECTION -N -e "SELECT feature_version FROM "$VERSION_DB.$GMP_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
- PATCH=$($MYSQL_CONNECTION -N -e "SELECT patch_version FROM "$VERSION_DB.$GMP_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
script:
- SOURCE_BRANCH=$(echo $CI_COMMIT_TITLE | cut -f 3 -d " " | cut -f 1 -d "/" | cut -f 2 -d "'")
- >
if [ "$SOURCE_BRANCH" = "QA" ]; then
((RELEASE=RELEASE+1)) && FEATURE=0 && PATCH=0;
TAG_NAME=gmp-v$ILENS.$RELEASE.$FEATURE
IMAGE_URL=azrilensprod.azurecr.io/ilens/gmp-release/versions/v"$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME"
PROD=$RELEASE; QA=0; DEV=0;
elif [ $SOURCE_BRANCH == "feature" ]; then
((FEATURE=FEATURE+1)) && PATCH=0;
TAG_NAME=gmp-v$ILENS.$RELEASE.$FEATURE
IMAGE_URL=azrilensprod.azurecr.io/ilens/gmp-release/versions/v"$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME"
elif [ $SOURCE_BRANCH == "patch" ]; then
((PATCH=PATCH+1));
TAG_NAME=gmp-v$ILENS.$RELEASE.$FEATURE.$PATCH
IMAGE_URL=azrilensprod.azurecr.io/ilens/gmp-release/versions/v"$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME"
else
exit 1
fi
- echo -e "\n\nImage:" $IMAGE_URL >> ReleaseNote.txt
- sed -i "1s|^|Version":" $TAG_NAME\n|" ReleaseNote.txt
- sed -i "1s|^|Module Name":" $CI_PROJECT_NAME\n|" ReleaseNote.txt
- docker build -t $IMAGE_URL .
- docker push $IMAGE_URL
- docker rmi --force $IMAGE_URL
- URL=$(echo $CI_PROJECT_URL | sed 's|https://||')
- git remote set-url origin https://$GIT_USRNAME:$GIT_USRPASSWD@$URL
- git config user.email "devopsilens@gmail.com"
- git config user.name "$GIT_USRNAME"
- git tag -a $TAG_NAME -F ReleaseNote.txt
- git push origin $TAG_NAME
- $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$GMP_RELEASE_TABLE SET release_version='$RELEASE', feature_version='$FEATURE', patch_version='$PATCH' WHERE module_name = '$CI_PROJECT_NAME' "
- $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$GMP_RELEASE_TABLE values('$CI_JOB_ID', '$CI_PROJECT_NAME','iLens', '$ILENS.$RELEASE.$FEATURE', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
tags:
- shell
only:
- gmp-release
#~~~~~| Requirements.txt version check |~~~~~#
package-version-check:
stage: validate
script:
- REQUIREMENTS=$(cat requirements.txt)
- FAILED=0
- >
for REQ in ${REQUIREMENTS[@]};
do
if [ "${REQ:0:1}" = "#" ]; then continue; fi
PKG=$(echo $REQ | tr = " " | awk '{print $1}')
VER=$(echo $REQ | tr = " " | awk '{print $2}')
VER=${VER//[^[:alnum:]]/}
if [ ! -z "${VER//[0-9]}" ] || [ -z $VER ]; then
echo " Package version not specified for: $PKG "
FAILED=`expr $FAILED + 1`
fi
done
- if [ $FAILED -gt 0 ]; then exit 1; fi
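# Illustrative outcome: a pinned entry such as "requests==2.27.1" passes, an unpinned
# entry such as "requests" is reported as "Package version not specified", and the job
# exits 1 once FAILED is non-zero.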
only:
- QA
tags:
- shell
#~~~~~| Vulnerability Scanner |~~~~~#
vulnerability-scanner:
stage: scan
script:
- QA=`expr $QA + 1` && DEV=0
- DOCKER_IMAGE=$CI_PROJECT_NAME:vulnerability-scan
- docker build -t $DOCKER_IMAGE .
- trivy image --format template --template "@/home/gitlab-runner/image-scanner/templates/html.tpl" -o imageScanner-$CI_PROJECT_NAME.html $DOCKER_IMAGE
- trivy image --format json -o imageScanner-$CI_PROJECT_NAME.json $DOCKER_IMAGE
- docker rmi --force $DOCKER_IMAGE
- mv imageScanner-$CI_PROJECT_NAME.html /data0/email-util/module/reports/
- >
if ! /home/gitlab-runner/image-scanner/severity_check imageScanner-$CI_PROJECT_NAME.json ; then
cd /home/gitlab-runner/image-scanner/
./mail imageScanner-$CI_PROJECT_NAME.html $DOCKER_IMAGE
fi
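# The job above builds a throwaway image, writes HTML and JSON Trivy reports, and mails
# the HTML report only when the severity_check helper flags the JSON results.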
only:
- QA
tags:
- shell
#~~~~~| QA K8 |~~~~~#
qa-k8-deployment:
stage: deploy
script:
- REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/qa
- export KUBECONFIG=/home/gitlab-runner/.kube/$QA_ENV
- NAMESPACE=ilens-core
- QA=`expr $QA + 1` && DEV=0
- docker build -t $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV .
- docker push $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
- echo "Deploying $CI_PROJECT_NAME"
- >
for YML in ${DEPLOYMENT_YML[@]};
do
FILE_PATH=$HELM_CHART/$YML
SERVICE=$(echo $YML | cut -f 1 -d "." )
CURR_VERSION=$(cat $FILE_PATH | grep "imageName:" )
CURR_VERSION=$(echo $CURR_VERSION | cut -f 3 -d ":")
echo " Deploying $SERVICE"
echo " $SERVICE Version: $CURR_VERSION"
sed -E -i'' "s|(.*imageName:.*"$REGISTRY_URL"/).*|\1"$CI_PROJECT_NAME":v"$PROD.$QA.$DEV"|" $FILE_PATH
helm upgrade --install $SERVICE $HELM_CHART -f $FILE_PATH -f $VARIABLES_YML -n $NAMESPACE --history-max 1
if ! sh $STATUS_SCRIPT $SERVICE $NAMESPACE $TIMEOUT ; then
sed -E -i'' "s|(.*imageName:.*"$REGISTRY_URL"/).*|\1"$CI_PROJECT_NAME":"$CURR_VERSION"|" $FILE_PATH
helm upgrade --install $SERVICE $HELM_CHART -f $FILE_PATH -f $VARIABLES_YML -n $NAMESPACE --history-max 1
echo " $SERVICE Reverted to the previous version..."
exit 1
fi
UI_POD=$(kubectl get pods -n $NAMESPACE | grep ilens-ui | awk '{print $1}')
UI_POD_LIST=($UI_POD)
if [ ! -z "$UI_POD_LIST" ]; then
echo "Restarting Nginx"
for UI_POD_NAME in ${UI_POD_LIST[@]}; do
kubectl exec $UI_POD_NAME -n $NAMESPACE -- nginx -s reload
done
fi
done
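# The loop above swaps the imageName in each deployment YML, runs helm upgrade, restores
# the previously recorded image and re-deploys it if the status script reports an unhealthy
# rollout, and on success reloads nginx inside any ilens-ui pods.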
only:
- QA
tags:
- shell
tag-update-qa:
stage: update
script:
- DEV=0
- REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/qa
- docker rmi --force $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
- QA=`expr $QA + 1` && DEV=0
- $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$DB_TABLE values('$CI_JOB_ID','Server','Service', '$CI_PROJECT_NAME','docker', '$PROD.$QA.$DEV', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
- $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD' ,qa='$QA', dev='$DEV' WHERE module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'"
dependencies:
- qa-k8-deployment
only:
- QA
tags:
- shell
# #~~~~~| JUBILANT PRE PROD |~~~~~#
# jubilant-pre-prod-deployment:
# stage: deploy
# script:
# - REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/dev
# - export KUBECONFIG=/home/gitlab-runner/.kube/dev-cluster-config-bkp
# - YML_PATH_DEV=/home/gitlab-runner/kubernetes/ilens/pre-prod/jubilant
# - NAMESPACE=core-dev
# - DEV=`expr $DEV + 1`
# - docker build -t $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV .
# - docker push $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
# - cd $YML_PATH_DEV
# - >
# for YML in ${DEPLOYMENT_YML[@]};
# do
# SERVICE=$(echo $YML | cut -f 1 -d "." )
# CURR_VERSION=$(cat $YML | grep "image:" )
# CURR_VERSION=$(echo $CURR_VERSION | cut -f 3 -d ":")
# echo " Deploying $SERVICE"
# echo " $SERVICE Version: $CURR_VERSION"
# sed -E -i'' "s|(.*image:.*"$REGISTRY_URL"/).*|\1"$CI_PROJECT_NAME":v"$PROD.$QA.$DEV"|" $YML
# kubectl delete -f $YML
# sleep 30
# kubectl apply -f $YML
# if ! sh $STATUS_SCRIPT $SERVICE $NAMESPACE $TIMEOUT ; then
# sed -E -i'' "s|(.*image:.*"$REGISTRY_URL"/).*|\1"$CI_PROJECT_NAME":"$CURR_VERSION"|" $YML
# kubectl apply -f $YML
# echo " $SERVICE Reverted to the previous version..."
# exit 1
# fi
# done
# only:
# - pre-prod
# tags:
# - shell
# tag-update-dev:
# stage: update
# script:
# - DEV=`expr $DEV + 1`
# - REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/dev
# - docker rmi --force $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
# - $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$DB_TABLE values('$CI_JOB_ID','Server','Service', '$CI_PROJECT_NAME','docker', '$PROD.$QA.$DEV', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
# - $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD' ,qa='$QA', dev='$DEV' WHERE module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'"
# dependencies:
# - jubilant-pre-prod-deployment
# only:
# - pre-prod
# tags:
# - shell
#~~~~~| PRE PROD |~~~~~#
pre-prod-deployment:
stage: deploy
script:
- REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/dev
- SOURCE_BRANCH=$(echo $CI_COMMIT_BRANCH | cut -f 1 -d "-")
- >
if [ ! -z $SOURCE_BRANCH ]; then
echo "Deploying $CI_PROJECT_NAME module in the $CI_COMMIT_BRANCH environment"
else
echo "Create a proper branch name, current branch name $CI_COMMIT_BRANCH"
exit 1
fi
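# Branch naming sketch (illustrative): a branch named dalmia-pre-prod yields
# SOURCE_BRANCH=dalmia, so /opt/service/compose/dalmia/docker-compose.yml is updated
# on the matching pre-prod host.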
- DOCKER_COMPOSE=/opt/service/compose/$SOURCE_BRANCH/docker-compose.yml
- DEV=`expr $DEV + 1`
- docker build -t $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV .
- docker push $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
- IMAGE_URL=$REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
- PROJECT_NAME=$(echo $CI_PROJECT_NAME | cut -f 1 -d ".")
- >
if [ "$SOURCE_BRANCH" = "dalmia" ] || [ "$SOURCE_BRANCH" = "jubilant" ] ; then
OLD_IMAGE=$(sshpass -p $DEV_235_PASSWD ssh $DEV_235_USERNAME@$DEV_235_HOSTNAME "cat $DOCKER_COMPOSE | grep '&$PROJECT_NAME-image' | cut -f '3' -d ' ' ")
echo "Current image":" $OLD_IMAGE"
echo "New image":" $IMAGE_URL"
sshpass -p $DEV_235_PASSWD ssh $DEV_235_USERNAME@$DEV_235_HOSTNAME "sed -E -i'' 's|(.*image:.*)'$OLD_IMAGE'|\1'$IMAGE_URL'|1' '$DOCKER_COMPOSE'"
sshpass -p $DEV_235_PASSWD ssh $DEV_235_USERNAME@$DEV_235_HOSTNAME "docker-compose -f $DOCKER_COMPOSE up -d"
elif [ "$SOURCE_BRANCH" = "hccb" ] || [ "$SOURCE_BRANCH" = "welspun" ] ; then
OLD_IMAGE=$(sshpass -p $DEV_236_PASSWD ssh $DEV_236_USERNAME@$DEV_236_HOSTNAME "cat $DOCKER_COMPOSE | grep '&$PROJECT_NAME-image' | cut -f '3' -d ' ' ")
echo "Current image":" $OLD_IMAGE"
echo "New image":" $IMAGE_URL"
sshpass -p $DEV_236_PASSWD ssh $DEV_236_USERNAME@$DEV_236_HOSTNAME "sed -E -i'' 's|(.*image:.*)'$OLD_IMAGE'|\1'$IMAGE_URL'|1' '$DOCKER_COMPOSE'"
sshpass -p $DEV_236_PASSWD ssh $DEV_236_USERNAME@$DEV_236_HOSTNAME "docker-compose -f $DOCKER_COMPOSE up -d"
else
echo "Create a branch name like "
fi
only:
- dalmia-pre-prod
- jubilant-pre-prod
- hccb-pre-prod
- welspun-pre-prod
tags:
- shell
tag-update-dev:
stage: update
script:
- DEV=`expr $DEV + 1`
- REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/dev
- $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$DB_TABLE values('$CI_JOB_ID','Server','Service', '$PROJECT_NAME','docker', '$PROD.$QA.$DEV', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
- $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD' ,qa='$QA', dev='$DEV' WHERE module_name = '$PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'"
dependencies:
- pre-prod-deployment
only:
- dalmia-pre-prod
- jubilant-pre-prod
- hccb-pre-prod
- welspun-pre-prod
tags:
- shell
#~~~~~| DEV 220 |~~~~~#
dev-deployment-220:
stage: deploy
script:
- tar czvf $CI_PROJECT_NAME.tar.gz *
- echo "Deploying to the dev 220 server..."
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "mkdir -p /tmp/$CI_PROJECT_NAME/tar/"
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "mkdir -p /tmp/$CI_PROJECT_NAME/untar/"
- sshpass -p $OFC_PASSWD scp $CI_PROJECT_NAME.tar.gz $OFC_USERNAME@$OFC_HOSTNAME:/tmp/$CI_PROJECT_NAME/tar/
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "tar xzvf /tmp/$CI_PROJECT_NAME/tar/$CI_PROJECT_NAME.tar.gz -C /tmp/$CI_PROJECT_NAME/untar/"
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "rsync -r /tmp/$CI_PROJECT_NAME/untar/* /opt/services/ilens2.0/$CI_PROJECT_NAME/"
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "/home/svc-ilens/anaconda3/envs/form-mt/bin/pip install -r /opt/services/ilens2.0/$CI_PROJECT_NAME/requirements.txt"
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "sudo systemctl restart ilens_2.0_dev_$CI_PROJECT_NAME.service"
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "sudo systemctl status ilens_2.0_dev_$CI_PROJECT_NAME.service"
after_script:
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "rm -rf /tmp/$CI_PROJECT_NAME"
- rm -f $CI_PROJECT_NAME.tar.gz
only:
- develop
tags:
- shell
# #~~~~~| DEV Image Build |~~~~~#
# dev-image-build:
# stage: build
# script:
# - REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/dev
# - DEV=`expr $DEV + 1`
# - docker build -t $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV .
# - docker push $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
# - docker rmi --force $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
# only:
# - develop
# tags:
# - shell
# tag-update-dev:
# stage: update
# script:
# - DEV=`expr $DEV + 1`
# - $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$DB_TABLE values('$CI_JOB_ID','Server','Service', '$CI_PROJECT_NAME','docker', '$PROD.$QA.$DEV', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
# - $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD' ,qa='$QA', dev='$DEV' WHERE module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'"
# dependencies:
# - dev-image-build
# only:
# - develop
# tags:
# - shell
#~~~~~| CODE QUALITY |~~~~~#
codequality:
stage: deploy
image: $SONAR_SCANNER_IMAGE
script:
- /opt/sonar-scanner/bin/sonar-scanner -Dsonar.projectKey=$CI_PROJECT_NAME -Dsonar.projectName=$CI_PROJECT_NAME -Dsonar.typescript.node=./node/node -Dsonar.login=admin -Dsonar.password=$SONAR_PASSWD -Dsonar.sources=.
- sleep 5
- python3 /opt/code_quality_report/static_code_quality_report_csv_v2.py $CI_PROJECT_NAME $GITLAB_USER_EMAIL,$EMAIL_TO $EMAIL_FROM $EMAIL_PASSWD False admin $SONAR_PASSWD
only:
- develop
tags:
- docker
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
hooks:
- id: end-of-file-fixer
- id: trailing-whitespace
- id: requirements-txt-fixer
- repo: https://github.com/omnilib/ufmt
rev: v2.0.0
hooks:
- id: ufmt
additional_dependencies:
- black == 22.6.0
- usort == 1.0.4
- repo: https://github.com/PyCQA/flake8
rev: 5.0.4
hooks:
- id: flake8
args:
- "--max-line-length=120"
- "--max-complexity=20"
- "--select=B,C,E,F,W,T4,B9"
# these are errors that will be ignored by flake8
# check out their meaning here
# https://flake8.pycqa.org/en/latest/user/error-codes.html
- "--ignore=E203,E266,E501,W503,F403,F401,E402"
FROM python:3.9.10-slim
COPY requirements.txt /code/requirements.txt
WORKDIR /code
RUN pip install -r requirements.txt
COPY . /code
CMD [ "python", "app.py" ]
Release Note:
- Version: v6.9
Features:
- Report Filter Support
- Back-fill enhancement Support
if __name__ == '__main__':
from dotenv import load_dotenv
load_dotenv()
import argparse
import gc
import uvicorn
from scripts.config.app_configurations import Service
from scripts.logging.logging import logger
gc.collect()
ap = argparse.ArgumentParser()
if __name__ == "__main__":
ap.add_argument(
"--port",
"-p",
required=False,
default=Service.PORT,
help="Port to start the application.",
)
ap.add_argument(
"--bind",
"-b",
required=False,
default=Service.HOST,
help="IP to start the application.",
)
arguments = vars(ap.parse_args())
logger.info(f"App Starting at {arguments['bind']}:{arguments['port']}")
uvicorn.run("main:app", host=arguments["bind"], port=int(arguments["port"]))
-----BEGIN RSA PRIVATE KEY-----
MIICWwIBAAKBgQClilTaeHq6Zc+kWHCNl1O0btGRm7ct3O5zqWx1mwwLUWH14eft
Hi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULfENhwd/D7P3mnoRlktPT2t+tt
RRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw2hcqOYe/NGTkmm1PswIDAQAB
AoGAZPARR1l5NBkKYGKQ1rU0E+wSmx+AtVVmjF39RUSyNmB8Q+poebwSgsr58IKt
T6Yq6Tjyl0UAZTGmferCK0xJJrqyP0hMn4nNNut+acWMKyt+9YrA2FO+r5Jb9JuT
SK35xXnM4aZLGppgWJxRzctpIz+qkf6oLRSZme0AuiqcwYECQQDY+QDL3wbWplRW
bze0DsZRMkDAkNY5OCydvjte4SR/mmAzsrpNrS5NztWbaaQrefoPbsdYBPbd8rS7
C/s/0L1zAkEAw1EC5zt2STuhkcKLa/tL+bk8WHHHtf19aC9kBj1TvWBFh+JojWCo
86iK5fLcHzhyQx5Qi3E9LG2HvOWhS1iUwQJAKbEHHyWW2c4SLJ2oVXf1UYrXeGkc
UNhjclgobl3StpZCYAy60cwyNo9E6l0NR7FjhG2j7lzd1t4ZLkvqFmQU0wJATLPe
yQIwBLh3Te+xoxlQD+Tvzuf3/v9qpWSfClhBL4jEJYYDeynvj6iry3whd91J+hPI
m8o/tNfay5L+UcGawQJAAtbqQc7qidFq+KQYLnv5gPRYlX/vNM+sWstUAqvWdMze
JYUoTHKgiXnSZ4mizI6/ovsBOMJTb6o1OJCKQtYylw==
-----END RSA PRIVATE KEY-----
-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQClilTaeHq6Zc+kWHCNl1O0btGR
m7ct3O5zqWx1mwwLUWH14eftHi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULf
ENhwd/D7P3mnoRlktPT2t+ttRRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw
2hcqOYe/NGTkmm1PswIDAQAB
-----END PUBLIC KEY-----
[MODULE]
name = form-mt
[LOGGING]
level = $LOG_LEVEL
[SERVICE]
port=5121
host=0.0.0.0
version_no=1.0.0
workers=1
module_name=$APP_NAME
enable_traceback = True
secure_cookie = $SECURE_COOKIE
[MONGO_DB]
uri= $MONGO_URI
[POSTGRES]
maintenance = $MAINTENANCE_URI
assistant = $ASSISTANT_URI
[DATABASES]
metadata_db=$METADATA_DB
ilens_assistant=$ILENS_ASSISTANT_DB
ilens_asset_model=$ILENS_ASSET_MODEL_DB
[PATH_TO_SERVICES]
scheduler_proxy = $SCHEDULER_PROXY
data_engine=$FORM_DE
form_mt = $FORM_MT
metadata_services=$METADATA_SERVICES
audit_proxy=$AUDIT_PROXY
workflow_mt=$WORKFLOW_MT
ilens_events=$ILENS_EVENTS
oee_services=$OEE_SERVICES
[BACKFILL]
interval=$INTERVAL
trigger_bandwidth=$TRIGGER_BANDWIDTH
[DIRECTORY]
base_path = $BASE_PATH
mount_dir = $MOUNT_DIR
keys_path = data/keys
[REDIS]
uri=$REDIS_URI
login_db = 9
project_tags_db = 18
[KAIROS]
kairos_url = $KAIROS_URI
[KAFKA]
host=$KAFKA_HOST
port=$KAFKA_PORT
topic=$KAFKA_TOPIC
history_topic=$KAFKA_HISTORY_OUTPUT_TOPIC
audit_topic=$KAFKA_AUDIT_TOPIC
enable_sites_partition=$ENABLE_KAFKA_PARTITION
split_key=$KAFKA_PARTITION_KEY
round_robin_enable=$ROUND_ROBIN_PARTITION
partition_db=13
[AUDITING]
periodic_entry_auditing=$PERIODIC_ENTRY_AUDITING
form_non_periodic_auditing=$FORM_NON_PERIODIC_AUDITING
form_periodic_auditing=$FORM_PERIODIC_AUDITING
[PATH_TO_OTHER_SERVICES]
email_service = $EMAIL_SERVICE_PROXY
[MQTT]
uri = $MQTT_URI
host = $MQTT_URL
port = $MQTT_PORT
publish_base_topic = ilens/notifications
[EVENTS]
enable_events=$ENABLE_EVENTS
# Add in environment variables here when updated for better collaboration
MODULE_NAME=form-management
MONGO_URI=mongodb://192.168.0.220:2717/
METADATA_DB=ilens_configuration
ILENS_ASSISTANT=ilens_assistant
ILENS_ASSET_MODEL_DB=ilens_asset_model
ASSISTANT_URI=postgresql://postgres:postgres@192.168.0.220:5432/ilens_assistant
FORM_DE=http://192.168.0.220/formde/
METADATA_SERVICES=http://192.168.0.220/ilens_api/
KAIROS_URI=http://192.168.0.220:8080/
BASE_PATH=opt/services/ilens2.0/volumes
MOUNT_DIR=/form-management
REDIS_URI = redis://192.168.0.220:6379
KAFKA_HOST=192.168.0.220
KAFKA_PORT=9092
KAFKA_TOPIC=ilens_dev
KAFKA_HISTORY_OUTPUT_TOPIC=ilens_dev_backup
KAFKA_AUDIT_TOPIC=audit_logs
MAINTENANCE_URI = postgresql://postgres:postgres@192.168.0.220:5432/maintenance_logbook
FORM_MT = http://192.168.0.220/form-mt/
PERIODIC_ENTRY_AUDITING=true
FORM_NON_PERIODIC_AUDITING=true
FORM_PERIODIC_AUDITING=true
ENABLE_KAFKA_PARTITION=true
ROUND_ROBIN_PARTITION=true
INTERVAL=60
EMAIL_SERVICE_PROXY=https://cloud.ilens.io/sms-util
MQTT_URL=192.168.0.220
MQTT_PORT=1883
SECURE_COOKIE=False
CORS_URLS=staging.ilens.io
SW_DOCS_URL=/docs
SW_OPENAPI_URL=/openapi.json
ENABLE_CORS=True
AUDIT_PROXY=http://192.168.0.220/audit_tracker
LOG_LEVEL=QTRACE
import os
from fastapi import FastAPI, Depends
from fastapi.middleware.cors import CORSMiddleware
from jwt_signature_validator.encoded_payload import (
EncodedPayloadSignatureMiddleware as SignatureVerificationMiddleware
)
from scripts.config.app_configurations import Service
from scripts.constants.app_constants import Secrets
from scripts.core.services import (
stage_router,
comment_router,
remark_router,
form_router,
custom_router,
stages_data_router,
mobile_task_router,
health_status
)
from scripts.utils.security_utils.decorators import CookieAuthentication
secure_access = os.environ.get("SECURE_ACCESS", default=False)
auth = CookieAuthentication()
app = FastAPI(
title="iLens Assistant Form Management",
version="1.0.0",
root_path="form-mt",
description="Form Management App",
openapi_url=os.environ.get("SW_OPENAPI_URL"),
docs_url=os.environ.get("SW_DOCS_URL"),
redoc_url=None,
)
if Service.verify_signature in [True, "True", 'true']:
app.add_middleware(
SignatureVerificationMiddleware,
jwt_secret=Secrets.signature_key,
jwt_algorithms=Secrets.signature_key_alg,
protect_hosts=Service.protected_hosts,
)
if os.environ.get("ENABLE_CORS") in (True, 'true', 'True') and os.environ.get("CORS_URLS"):
app.add_middleware(
CORSMiddleware,
allow_origins=os.environ.get("CORS_URLS").split(","),
allow_credentials=True,
allow_methods=["GET", "POST", "DELETE", "PUT"],
allow_headers=["*"],
)
auth_enabled = [Depends(auth)] if secure_access in [True, 'true', 'True'] else []
app.include_router(stage_router, dependencies=auth_enabled)
app.include_router(comment_router, dependencies=auth_enabled)
app.include_router(remark_router, dependencies=auth_enabled)
app.include_router(form_router, dependencies=auth_enabled)
app.include_router(custom_router, dependencies=auth_enabled)
app.include_router(stages_data_router, dependencies=auth_enabled)
app.include_router(mobile_task_router, dependencies=auth_enabled)
app.include_router(health_status)
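# Routers are mounted with the cookie-auth dependency only when SECURE_ACCESS is truthy;
# health_status is always left unauthenticated.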
# aiofiles~=0.8.0
# aiohttp~=3.8.1
# crypto~=1.4.1
# pydantic~=1.9.0
# python-dateutil~=2.8.2
cryptography~=36.0.1
fastapi~=0.73.0
formio-data~=0.3.14
httpx~=0.22.0
ilens-kafka-publisher==0.4.2
jwt-signature-validator~=0.0.1
kafka-python~=2.0.2
numpy~=1.22.2
openpyxl~=3.0.9
paho-mqtt~=1.6.1
pandas~=1.4.1
pre-commit~=2.20.0
psycopg2-binary~=2.9.3
pyjwt~=2.3.0
pymongo~=4.0.1
python-dotenv~=0.19.2
python-multipart~=0.0.5
pytz~=2021.3
pyyaml~=6.0
redis~=4.1.4
requests~=2.27.1
sqlalchemy-utils~=0.38.2
sqlalchemy==1.4.31
uvicorn~=0.17.5
# Created by .ignore support plugin (hsz.mobi)
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
.idea
logs
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
### VisualStudio template
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
### JupyterNotebooks template
# gitignore template for Jupyter Notebooks
# website: http://jupyter.org/
*/.ipynb_checkpoints/*
.env
*.xml
*.iml
# Remove previous ipynb_checkpoints
# git rm -r .ipynb_checkpoints/
import shutil
from scripts.config.app_configurations import config
if __name__ == '__main__':
from dotenv import load_dotenv
load_dotenv()
import os.path
class KeyPath(object):
keys_path = config['DIRECTORY']['keys_path']
if not os.path.isfile(os.path.join(keys_path, "public")) or not os.path.isfile(
os.path.join(keys_path, "private")):
if not os.path.exists(keys_path):
os.makedirs(keys_path)
shutil.copy(os.path.join("assets", "keys", "public"), os.path.join(keys_path, "public"))
shutil.copy(os.path.join("assets", "keys", "private"), os.path.join(keys_path, "private"))
public = os.path.join(keys_path, "public")
private = os.path.join(keys_path, "private")
"""
This file exposes configurations from config file and environments as Class Objects
"""
import shutil
if __name__ == '__main__':
from dotenv import load_dotenv
load_dotenv()
import os.path
import sys
from configparser import ConfigParser, BasicInterpolation
class EnvInterpolation(BasicInterpolation):
"""
Interpolation which expands environment variables in values.
"""
def before_get(self, parser, section, option, value, defaults):
value = super().before_get(parser, section, option, value, defaults)
if not os.path.expandvars(value).startswith('$'):
return os.path.expandvars(value)
else:
return
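# Example (illustrative): with MONGO_URI exported, "uri= $MONGO_URI" in conf/application.conf
# resolves to the exported value; if the variable is unset, os.path.expandvars leaves the
# "$..." untouched and before_get returns None, so callers' "if not <value>" checks treat
# the option as missing.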
try:
config = ConfigParser(interpolation=EnvInterpolation())
config.read("conf/application.conf")
except Exception as e:
print(f"Error while loading the config: {e}")
print("Failed to Load Configuration. Exiting!!!")
sys.stdout.flush()
sys.exit()
class Service:
MODULE_NAME = config["MODULE"]["name"]
HOST = config.get("SERVICE", "host")
PORT = config.getint("SERVICE", "port")
secure_cookie = config.getboolean("SERVICE", "secure_cookie", fallback=True)
verify_signature = os.environ.get("VERIFY_SIGNATURE", False)
protected_hosts = os.environ.get("PROTECTED_HOSTS", "").split(",")
class DBConf:
MONGO_URI = config.get('MONGO_DB', 'uri')
if not MONGO_URI:
print("Error, environment variable MONGO_URI not set")
sys.exit(1)
MAINTENANCE_DB_URI = config.get('POSTGRES', "maintenance")
if not MAINTENANCE_DB_URI:
print("MAINTENANCE_DB_URI env variables missing")
sys.exit(1)
ASSISTANT_DB_URI = config.get('POSTGRES', "assistant")
if not ASSISTANT_DB_URI:
print("ASSISTANT_DB_URI env variables missing")
sys.exit(1)
class KafkaConf:
host = config.get('KAFKA', 'host')
port = config.get('KAFKA', 'port')
topic = config.get('KAFKA', 'topic')
backdated_topic = config.get('KAFKA', 'history_topic')
if not any([topic, host, port]):
print("KAFKA env variables missing, continuing without Kafka/Kairos support")
audit_topic = config.get('KAFKA', 'audit_topic')
# option names match the [KAFKA] section of conf/application.conf
enable_sites_partition = config.getboolean("KAFKA", "enable_sites_partition", fallback=True)
split_key = config["KAFKA"].get("split_key", "site_id")
round_robin_enable = config.getboolean("KAFKA", "round_robin_enable", fallback=True)
redis_db = config.getint("KAFKA", "partition_db")
class Logging:
level = config.get("LOGGING", "level", fallback="INFO")
level = level or "INFO"
print(f"Logging Level set to: {level}")
class StoragePaths:
module_name = config.get('SERVICE', 'module_name')
if not module_name:
module_name = "form_management"
base_path = os.path.join("data", module_name)
class DatabaseConstants:
metadata_db = config.get("DATABASES", "metadata_db")
if not bool(metadata_db):
metadata_db = "ilens_configuration"
ilens_assistant_db = config.get("DATABASES", "ilens_assistant")
if not bool(ilens_assistant_db):
ilens_assistant_db = "ilens_assistant"
ilens_asset_model_db = config.get("DATABASES", "ilens_asset_model")
if not bool(ilens_asset_model_db):
ilens_asset_model_db = "ilens_asset_model"
class PathToServices:
DATA_ENGINE = config.get("PATH_TO_SERVICES", "data_engine")
if not bool(DATA_ENGINE):
print("FORM_DE not set, proceeding without data engine support")
METADATA_SERVICES = config.get("PATH_TO_SERVICES", "metadata_services")
if not bool(METADATA_SERVICES):
print("METADATA_SERVICES not set, proceeding without metadata_services support")
AUDIT_PROXY = config.get("PATH_TO_SERVICES", "audit_proxy")
if not bool(AUDIT_PROXY):
print("AUDIT_PROXY not set, proceeding without audit_proxy support")
WORKFLOW_MT = config.get("PATH_TO_SERVICES", "workflow_mt")
if not bool(WORKFLOW_MT):
print("WORKFLOW_MT_PROXY not set, proceeding without audit_proxy support")
FORM_MT = config.get("PATH_TO_SERVICES", "form_mt")
if not bool(FORM_MT):
print("Error, environment variable FORM_MT not set")
sys.exit(1)
ILENS_EVENTS = config.get("PATH_TO_SERVICES", "ilens_events")
if not bool(ILENS_EVENTS):
print("Error, environment variable ILENS_EVENTS not set")
sys.exit(1)
OEE_SERVICES = config.get("PATH_TO_SERVICES", "oee_services")
if not bool(OEE_SERVICES):
print("Error, environment variable OEE_SERVICES not set")
sys.exit(1)
class PathToStorage:
BASE_PATH = config.get("DIRECTORY", "base_path")
if not BASE_PATH:
print("Error, environment variable BASE_PATH not set")
sys.exit(1)
MOUNT_DIR = config.get("DIRECTORY", "mount_dir")
if not MOUNT_DIR:
print("Error, environment variable MOUNT_DIR not set")
sys.exit(1)
MODULE_PATH = os.path.join(BASE_PATH, MOUNT_DIR.lstrip('/'))
FORM_IO_UPLOADS = os.path.join(MODULE_PATH, "form_io_uploads")
TEMPLATES_UPLOADS = os.path.join(MODULE_PATH, "templates_uploads")
LOGS_MODULE_PATH = f"{BASE_PATH}/logs{MOUNT_DIR}/"
class KeyPath:
keys_path = config['DIRECTORY']['keys_path']
if not os.path.isfile(os.path.join(keys_path, "public")) or not os.path.isfile(os.path.join(keys_path, "private")):
if not os.path.exists(keys_path):
os.makedirs(keys_path)
shutil.copy(os.path.join("assets", "keys", "public"), os.path.join(keys_path, "public"))
shutil.copy(os.path.join("assets", "keys", "private"), os.path.join(keys_path, "private"))
public = os.path.join(keys_path, "public")
private = os.path.join(keys_path, "private")
class RedisConfig:
uri = config.get("REDIS", "uri")
login_db = config["REDIS"]["login_db"]
project_tags_db = config.getint("REDIS", "project_tags_db")
class KairosConfig:
uri = config.get("KAIROS", "kairos_url")
class BackFill:
interval_in_mins = config.get("BACKFILL", "interval", fallback=60)
trigger_step_threshold = config.getint("BACKFILL", "trigger_bandwidth", fallback=300)
class EnableAuditing:
periodic_entry_auditing = config.getboolean("AUDITING", "periodic_entry_auditing", fallback=False)
form_non_periodic_auditing = config.getboolean("AUDITING", "form_non_periodic_auditing", fallback=False)
form_periodic_auditing = config.getboolean("AUDITING", "form_periodic_auditing", fallback=False)
class OtherService:
EMAIL_URL = config["PATH_TO_OTHER_SERVICES"]["email_service"]
class MQTTConf:
uri = config["MQTT"]["uri"]
host = config["MQTT"]["host"]
port = int(config["MQTT"]["port"])
publish_base_topic = config["MQTT"]["publish_base_topic"]
class EnableEvents:
enable_events = config.getboolean("EVENTS", "enable_events", fallback=True)
from scripts.config.app_configurations import DatabaseConstants
class Secrets:
LOCK_OUT_TIME_MINS = 30
leeway_in_mins = 10
unique_key = '45c37939-0f75'
token = '8674cd1d-2578-4a62-8ab7-d3ee5f9a'
issuer = "ilens"
alg = "RS256"
class StatusCodes:
SUCCESS = [200, 201, 204]
class UserCollectionKeys:
KEY_LANGUAGE = "language"
KEY_NAME = "name"
KEY_USER_ID = "user_id"
KEY_PROJECT_ID = "project_id"
KEY_USERNAME = "username"
KEY_USER_ROLE = "userrole"
KEY_EMAIL = "email"
class DatabaseNames:
ilens_configuration = DatabaseConstants.metadata_db
ilens_assistant = DatabaseConstants.ilens_assistant_db
class CollectionNames:
form_props = "form_props"
scheduled_info = "scheduled_info"
templates = "templates"
forms = "forms"
unique_id = "unique_id"
user = "user"
user_project = "user_project"
lookup_table = "lookup_table"
template_category = "template_category"
step_category = "step_category"
constants = "constants"
workflows = "workflows"
workflow_permissions = "workflow_permissions"
triggers = "triggers"
task_instance_data = "task_instance_data"
product_master = "product_master"
periodic_data = "periodic_data"
project_remarks = "project_remarks"
action_templates = "action_templates"
user_role = "user_role"
shift_details = "shift_details"
site_conf = "site_conf"
customer_projects = "customer_projects"
logbook = "logbook"
logbook_links = "logbook_links"
job_list = "job_list"
schedule_metadata = "schedule_metadata"
tasks = "task_info"
task_instance = "task_instances"
steps = "steps"
step_templates = "step_templates"
step_data_files = "step_data_files"
class CommonKeys:
KEY_USER_ID = "user_id"
KEY_PROCESS_TEMPLATE = "process_template"
KEY_SITE_TEMPLATE = "site_template"
KEY_PROCESS_TEMPLT_ID = "process_templt_id"
KEY_KEY_LIST = "key_list"
KEY_VALUE = "value"
KEY_SITE_TEMPLT_ID = "site_templt_id"
KEY_TYPE = "type"
KEY_LOOKUP = "lookup_name"
KEY_CREATED_BY = "created_by"
KEY_CREATED_TIME = "created_at"
KEY_UPDATED_AT = "updated_by"
KEY_LAST_UPDATED_TIME = "updated_at"
class StepCategories:
NON_PERIODIC = "step_category_100"
TASK_CREATION = "step_category_101"
PERIODIC = "step_category_102"
TRIGGER_BASED = "step_category_103"
class FactorsInTriggerCompletion:
CONSOLIDATED = ["end_of_day", "end_of_week"]
END_OF_DAY = "end_of_day"
END_OF_WEEK = "end_of_week"
class UniqueIdKeys:
KEY_ID = "id"
KEY_KEY = "key"
class CustomObjects:
model_types_for_psql_tables = ["production_losses"]
custom_models_to_list = ["ope_formula_calculation", "oee_formula_calculation", "rm_consumption", "util_std_norm",
"gen_std_norm",
"oee_production_losses", "oee_daily_production", "oee_master_table"]
oee_production_losses = "oee_production_losses"
class CommonConstants(object):
__iso_format__ = '%Y-%m-%dT%H:%M:%S%z'
class FormEndPoints:
api_form = "/form"
api_mobile_form_multiple = "/form_load_multiple"
api_wrk_task_details = "/mobile/wrk_task_details"
api_stage = "/stage"
api_stages_data = "/stage_data"
api_add_periodic_data = "/add_periodic_data"
api_get_tags = "/get_tags"
api_get_time_list = "/get_time_list"
api_timewise_tags = "/timewise_tags"
api_trigger = "/trigger"
api_trigger_task_completion = "/trigger_task_completion"
api_mark_task_complete = "/mark_task_complete"
api_custom = "/custom"
api_reference = "/reference"
api_save_table = "/save_table"
api_list_periodic_steps = "/list_periodic_steps"
api_get = "/get"
api_list = "/list"
api_render = "/render"
api_mobile = "/mobile"
api_remark = "/remark"
api_save = "/save"
api_send_notification = "/send_notification"
api_copy_property_values = "/copy_property_values"
api_backfill = "utils/periodic_data/auto/insert"
api_search_asset = "tags_v2/search_asset"
api_get_user_details = "ilens_config/get_user_details"
class CommentsEndPoints:
api_comment = "/comments"
api_list = "/list"
class DataEngineEndpoints:
api_schedule = "schd/task/schedule"
api_iot_param = "iot_param/get"
class OeeServicesEndpoints:
api_create_batch = "/calculator/batch_oee/calculate"
class CustomEndPoints:
api_save_table = f"custom{FormEndPoints.api_save_table}"
class StageDataEndPoints:
api_create_template = "/save"
api_list_template = "/list"
api_template_table_options = "/template_table_options"
api_delete_template = "/delete"
api_fetch_template = "/fetch"
api_download_template = "/template_download"
api_upload_data_sheet = "/upload_data_file"
api_get_templates = "/get_templates"
api_get_file_data_list = "/uploaded_file_list"
api_download_data_file = "/download_data_file"
api_delete_data_file = "/delete_data_file"
api_back_fill_data = "utils/periodic_data/direct/insert"
class OtherEndPoints:
api_send_email = "/api/v1/eim/email/send"
class EventsEndPoints:
api_create_event = "/api/event/create"
from scripts.config.app_configurations import DatabaseConstants
class DatabaseNames:
ilens_configuration = DatabaseConstants.metadata_db
ilens_assistant = DatabaseConstants.ilens_assistant_db
ilens_asset_model = DatabaseConstants.ilens_asset_model_db
class CollectionNames:
form_props = "form_props"
scheduled_info = "scheduled_info"
forms = "forms"
unique_id = "unique_id"
user = "user"
tags = "tags"
shifts = "shifts"
lookup_table = "lookup_table"
template_category = "template_category"
step_category = "step_category"
constants = "constants"
workflows = "workflows"
workflow_permissions = "workflow_permissions"
triggers = "triggers"
product_master = "product_master"
periodic_data = "periodic_data"
reference_steps = "reference_steps"
project_remarks = "project_remarks"
action_templates = "action_templates"
user_role = "user_role"
shift_details = "shift_details"
site_conf = "site_conf"
customer_projects = "customer_projects"
logbook = "logbook"
logbook_links = "logbook_links"
job_list = "job_list"
schedule_metadata = "schedule_metadata"
steps = "steps"
trigger_steps = "trigger_steps"
task_instance_data = "task_instance_data"
task_instances = "task_instances"
tasks = "task_info"
asset_model_details = "asset_model_details"
rule_targets = "rule_targets"
class TaskInstanceDataKeys:
KEY_STAGE_ID = "stage_id"
KEY_TASK_ID = "task_id"
KEY_STATUS = "status"
KEY_STEP_ID = "step_id"
class Secrets:
LOCK_OUT_TIME_MINS = 30
leeway_in_mins = 10
unique_key = '45c37939-0f75'
token = '8674cd1d-2578-4a62-8ab7-d3ee5f9a'
issuer = "ilens"
alg = "RS256"
signature_key = 'kliLensKLiLensKL'
signature_key_alg = ["HS256"]
class SiteConfCollectionKeys:
KEY_SITE_NAME = "site_name"
KEY_SITE_INFO = "site_info"
KEY_CUSTOMER_PROJECT_ID = "customer_project_id"
KEY_SITE_ID = "site_id"
KEY_PRODUCT_ENCRYPTED = "product_encrypted"
KEY_PROCESS_ID = "process_id"
class AuditingKeys:
periodic = "periodic"
trigger_based = "trigger-based"
non_periodic = "non-periodic"
user = "user"
machine = "machine"
data_published = "data_published"
success = "success"
failed = "failed"
class CustomerProjectKeys:
KEY_CUSTOMER_PROJECT_ID = "customer_project_id"
KEY_CUSTOMER_PROJECT_NAME = "customer_project_name"
class CustomKeys:
ACTUAL_PRODUCTION_MTD = "actual_production_mtd"
CAPACITY_FOR_SHIFT_SUMMARY_MTD = "capacity_for_shift_mtd"
PRODUCTION_LOSS_MTD = "production_loss_mtd"
OPE_MTD = "ope_mtd"
class CommonStatusCode:
SUCCESS_CODES = (
200,
201,
204,
)
class SubmitAction:
refresh = "refresh"
save = "save"
view = "view"
class EmailAuth:
username = 'AllGoodNamesRGone'
password = 'comBRANSeANtamasEbICaPeC'
ui_time_format_data = {
"dd/MM/yyyy HH:mm:ss": "%d/%m/%Y %H:%M:%S",
"dd-MM-yyyy HH:mm:ss": "%d-%m-%Y %H:%M:%S",
"yyyy/dd/MM HH:mm:ss": "%Y/%d/%m %H:%M:%S",
"yyyy-dd-MM HH:mm:ss": "%Y-%d-%m %H:%M:%S",
"yyyy/MM/dd HH:mm:ss": "%Y/%m/%d %H:%M:%S",
"yyyy-MM-dd HH:mm:ss": "%Y-%m-%d %H:%M:%S",
"dd/MM/yyyy": "%d/%m/%Y",
"dd-MM-yyyy": "%d-%m-%Y",
"yyyy/dd/MM": "%Y/%d/%m",
"yyyy-dd-MM": "%Y-%d-%m",
"yyyy/MM/dd": "%Y/%m/%d",
"yyyy-MM-dd": "%Y-%m-%d",
"MM/dd/yyyy": "%m/%d/%Y",
"MM-dd-yyyy": "%m-%d-%Y",
"yyyy-dd-MM HH:mm": "%Y-%m-%d %H:%M",
"yyyy-MM-dd HH:mm": "%Y-%m-%d %H:%M",
"dd-MM HH:mm:ss": "%d-%m %H:%M:%S",
"dd/MM HH:mm:ss": "%d/%m %H:%M:%S",
"MM-dd HH:mm:ss": "%m-%d %H:%M:%S",
"MM/dd HH:mm:ss": "%m/%d %H:%M:%S",
"monthYear": "%b, %Y",
"dateMonth": "%d %b",
"monthDate": "%b %d",
"dateMonthYear": "%d %b, %Y",
"yearDateMonth": "%Y, %d %b",
"monthDateYear": "%b %d, %Y",
"yearMonthDate": "%Y, %b %d",
"MonthDateYear": "%B %d, %Y",
"HH:mm": "%H:%M",
"None": None
}
date_time_with_hour = "%d-%m-%Y %H:%M"
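# Example (illustrative): ui_time_format_data["dd/MM/yyyy"] == "%d/%m/%Y", so a UI format of
# "dd/MM/yyyy" renders datetime(2022, 3, 1) as "01/03/2022" via strftime.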
class StepRecordKeys:
KEY_STEP_ID = "step_id"
KEY_PROJECT_ID = "project_id"
KEY_STEP_NAME = "step_name"
class TaskKeys:
KEY_PROJECT_ID = "project_id"
KEY_TASK_ID = "task_info_id"
KEY_TASK_INSTANCE = "task_id"
KEY_TASK_CREATION_DATA = "task_creation_data"
KEY_ASSOCIATED_WORKFLOW_ID = "associated_workflow_id"
KEY_WORKFLOW_VERSION = "workflow_version"
KEY_CURRENT_STATUS = "current_status"
class BprRecordKeys:
KEY_BPR_ID = "bpr_id"
KEY_PROJECT_ID = "project_id"
class TemplateCategoryKeys:
KEY_TEMPLATE_CATEGORY_ID = "template_category_id"
KEY_TEMPLATE_NAME = "name"
class ScheduledInfoKeys:
KEY_STEP_ID = "step_id"
KEY_SCHEDULE_PROPERTIES = "schedule_properties"
KEY_SCHEDULE_ID = "schedule_id"
KEY_JOB_ID = "job_id"
class FormPropsKeys:
KEY_STEP_ID = "step_id"
KEY_FORM_INFO = "form_info"
class ComponentInstanceKeys:
KEY_WORKFLOW_COMPONENT_ID = "workflow_component_id"
KEY_NODE_PLAYGROUND_ID = "node_playground_id"
class WorkflowKeys:
KEY_WORKFLOW_ID = "workflow_id"
KEY_WORKFLOW_VERSION = "workflow_version"
KEY_PROJECT_ID = "project_id"
KEY_WORKFLOW_NAME = "workflow_name"
class TagKeys:
KEY_TAG_ID = "id"
KEY_TAG_NAME = "tag_name"
class WorkflowPermissionsKeys:
KEY_WORKFLOW_STATUS = "workflow_status"
KEY_STEP_ID = "step_id"
KEY_WORKFLOW_ID = "workflow_id"
KEY_WORKFLOW_VERSION = "workflow_version"
KEY_USER_ROLE = "user_role"
KEY_PERMISSIONS = "permissions"
class TriggerKeys:
KEY_TRIGGER_ID = "trigger_id"
KEY_TRIGGER_TYPE = "trigger_type"
KEY_ROLE = "role"
class WorkflowInstanceKeys:
KEY_WORKFLOW_INSTANCE_ID = "workflow_instance_id"
KEY_WORKFLOW_SPEC_ID = "workflow_spec_id"
KEY_VERSION_ID = "version_id"
KEY_BUILDER_ID = "builder_id"
KEY_PROJECT_ID = "project_id"
KEY_STATUS = "status"
class PeriodicDataKeys:
KEY_STEP_ID = "step_id"
KEY_DATE = "date"
KEY_DATA = "data"
KEY_MANUAL_DATA = "manual_data"
class StageDataKeys(WorkflowInstanceKeys):
KEY_STAGE_ID = "stage_id"
KEY_TEMPLATE_ID = "template_id"
KEY_DATA = "data"
KEY_STATUS = "status"
STAGE_TYPE = "stage_type"
KEY_IS_DELETED = "is_deleted"
KEY_REMARKS = "remarks"
KEY_STAGE_CONFIGURATION = "stage_configuration"
class ProjectRemarksKeys:
KEY_PROJECT_ID = "project_id"
Key_REMARKS = "remarks"
# mobility
class ProductMasterDataKeys:
KEY_ID = "id"
KEY_NAME = "name"
# mobility
class ShiftDetailsKeys:
KEY_PROJECT_ID = "project_id"
KEY_USER_ID = "user_id"
KEY_SHIFT_END_TIME = "shift_end_time"
KEY_SHIFT_START_TIME = "shift_start_time"
KEY_IS_STARTED = "is_started"
class StepsCategoryKeys:
pass
class TemplateRecordKeys:
pass
class ReferenceDataKeys:
KEY_STEP_ID = "step_id"
KEY_DATE = "date"
KEY_PROPERTIES = "properties"
KEY_DATA = "data"
KEY_STEP_CATEGORY = "step_category"
KEY_ENTITY_NAME = "entity_name"
KEY_EVENT_ID = "event_id"
KEY_TASK_ID = "task_id"
class StepTemplateKeys:
KEY_PROJECT_ID = "project_id"
KEY_TEMPLATE_ID = "template_id"
KEY_TEMPLATE_NAME = "template_name"
KEY_LOGBOOK_ID = "logbook_id"
class StepDataFileKeys:
KEY_PROJECT_ID = "project_id"
KEY_TEMPLATE_ID = "template_id"
KEY_FILE_ID = "file_id"
KEY_FILE_NAME = "file_name"
class AssetDetailsKeys:
KEY_PROJECT_ID = "project_id"
KEY_ASSET_MODEL_NAME = "asset_model_name"
KEY_ASSET_MODEL_ID = "asset_model_id"
KEY_ASSET_DESCRIPTION = "asset_description"
KEY_ASSET_VERSION = "asset_version"
KEY_ASSET_MODEL_ICON = "asset_model_icon"
class DataNotFound(Exception):
pass
class KairosDBError(Exception):
pass
class LogbookConstants:
external_action_data = [
{
"action": "addnew",
"label": "Create New",
"type": "addnew"
}
]
table_actions_action_data = [
{
"action": "edit",
"label": "Edit",
"type": "edit",
"icon-class": "fa fa-pencil"
},
{
"action": "delete",
"label": "Delete",
"type": "delete",
"icon-class": "fa fa-trash"
}
]
headerContent = [
{
"value": "logbook_name",
"label": "Logbook Name",
"enable_column_search": True,
"header_type": "text",
"action": {
"action": "edit",
"type": "edit",
"label": "Edit"
},
"enableClick": True,
"style": "indicate-item cursor-pointer"
},
{
"value": "logbook_description",
"label": "Description",
"enable_column_search": True,
"header_type": "text"
},
# {"value": "workflow_name",
# "label": "Workflow",
# "enable_column_search": True,
# "header_type": "select",
# "options": [
# ]
# },
{
"label": "Business Process Tags",
"value": "business_process_tags",
"enable_column_search": True,
"header_type": "text"
},
{
"value": "updated_on",
"label": "Last Modified on",
"enable_column_search": True,
"header_type": "date_range"
},
{
"value": "updated_by",
"label": "Last Modified by",
"enable_column_search": True,
"header_type": "select",
"options": [
{
"label": "Login module",
"value": "Login module"
}
]
},
]
class StageConstants:
mark_as_completed = {
"label": "Mark Step as Completed",
"value": "mark_complete",
"type": "toggle",
"check_completion": False,
"properties": {
"btn_class": "btn-primary",
"bg_color": "#0f62fe",
"icon": "fa-floppy-o",
"class": "pull-right"
}
}
mark_complete_icon = "fa fa-check-circle text-success"
mark_complete_icon_color = "#20f952"
class TemplateListConstants:
external_action_data = [
{
"action": "addnew",
"label": "Create New",
"type": "addnew"
}
]
table_actions_action_data = [
{
"action": "edit",
"label": "Edit",
"type": "edit",
"icon-class": "fa fa-pencil"
},
{
"action": "delete",
"label": "Delete",
"type": "delete",
"icon-class": "fa fa-trash"
}
]
headerContent = [
{
"value": "step_name",
"label": "Step Name",
"enable_column_search": True,
"header_type": "text",
"action": {
"action": "edit",
"label": "edit",
"type": "edit",
"icon-class": "fa-eye"
},
"enableClick": True,
"style": "indicate-item cursor-pointer"
},
{
"value": "description",
"label": "Description",
"enable_column_search": True,
"header_type": "text"
},
{
"value": "created_by",
"label": "Created By",
"enable_column_search": True,
"header_type": "select",
"options": [
{
"label": "Login module",
"value": "Login module"
}
]
},
{
"value": "created_on",
"label": "Created On",
"enable_column_search": True,
"header_type": "date_range"
},
{
"value": "step_category",
"label": "Step Type",
"enable_column_search": True,
"header_type": "select",
"options": [
]
}
]
class TemplateConstants:
external_action_data = [
]
table_actions_action_data = [
{
"action": "delete",
"label": "Delete",
"type": "delete",
"icon-class": "fa fa-trash"
}
]
headerContent = [
{
"value": "template_name",
"label": "Template Name",
"enable_column_search": True,
"header_type": "text",
"action": {
"action": "edit",
"type": "edit",
"label": "Edit"
},
"enableClick": True,
"style": "indicate-item cursor-pointer"
},
{
"value": "logbook_name",
"label": "Logbook Name",
"enable_column_search": True,
"header_type": "text"
},
{
"value": "updated_by",
"label": "Last Modified by",
"enable_column_search": True,
"header_type": "select",
"options": [
]
},
{
"value": "updated_on",
"label": "Last Modified on",
"enable_column_search": True,
"header_type": "date_range"
},
]
class TemplateStorage:
templates_files = "templates"
upload_data_files = "data_files"
class WorkflowConstants:
external_action_data = [
{
"action": "addnew",
"label": "Create New",
"type": "addnew"
}
]
table_actions_action_data = [
{
"action": "edit",
"label": "Edit",
"type": "edit",
"icon-class": "fa fa-pencil"
},
{
"action": "delete",
"label": "Delete",
"type": "delete",
"icon-class": "fa fa-trash"
}
]
headerContent = [
{
"value": "workflow_name",
"label": "Workflow Name",
"enable_column_search": True,
"header_type": "text",
"action": {
"action": "edit",
"label": "Edit",
"type": "edit",
"icon-class": "fa fa-pencil"
},
"enableClick": True,
"style": "indicate-item cursor-pointer"
},
{
"value": "workflow_description",
"label": "Description",
"enable_column_search": True,
"header_type": "text"
},
{
"value": "tags",
"label": "Tags",
"enable_column_search": True,
"header_type": "text"
},
{
"value": "created_by",
"label": "Created By",
"enable_column_search": True,
"header_type": "select",
"options": [
{
"label": "Login module",
"value": "Login module"
}
]
},
{
"value": "created_on",
"label": "Created On",
"enable_column_search": True,
"header_type": "date_range"
}
]
import time
from fastapi import Request
from scripts.config.app_configurations import BackFill
from scripts.constants.date_constants import ui_time_format_data
from scripts.core.engine.data_engine import DataEngine
from scripts.core.schemas.stages import TriggerData
from scripts.db import mongo_client, PeriodicData, StepCollection, TriggerStepCollection
from scripts.errors import ImplementationError, RestrictBlanks
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
class ComponentManipulation:
def __init__(self, project_id=None):
self.step_conn = StepCollection(mongo_client=mongo_client, project_id=project_id)
self.periodic_conn = PeriodicData(mongo_client, project_id=project_id)
self.trigger_step_conn = TriggerStepCollection(mongo_client=mongo_client, project_id=project_id)
self.common_utils = CommonUtils(project_id=project_id)
self.data_engine = DataEngine(project_id=project_id)
self.counter = 0
def parse_component_json(self, abs_component, time_in_str, data_engine_payload, restrict_blanks):
"""real_time: bool, if enabled, values will be fetched from redis otherwise kairos"""
for itr, component in enumerate(abs_component):
if "components" in component.keys():
for eac_itr, each_component in enumerate(component["components"]):
if "rows" in each_component.keys() and isinstance(each_component["rows"], list):
head_rows = each_component["rows"][:1]
tags = [row["components"][0].get("properties", dict()).get("master_tag", "")
for row in head_rows[0] if len(row["components"]) > 0][1:]
data_engine_payload.update(tag_list=tags)
data_from_kairos = self.data_engine.get_tag_values(**data_engine_payload)
if not data_from_kairos:
data_from_kairos = dict()
if not set(tags).issubset(data_from_kairos.keys()):
if not data_from_kairos and restrict_blanks:
raise RestrictBlanks
for missing_tag in tags:
data_from_kairos.setdefault(missing_tag, "-")
incremented_rows = int(each_component.get("numRows")) + 1
cell_list, form_props = self.generate_prop_names(tags, time_in_str, head_rows[0], )
each_component["numRows"] = incremented_rows
each_component["rows"].append(cell_list)
return True, form_props, data_from_kairos
elif "columns" in each_component.keys():
self.parse_component_json(each_component["columns"], time_in_str, data_engine_payload,
restrict_blanks)
elif "components" in each_component.keys():
self.parse_component_json(each_component["components"], time_in_str, data_engine_payload,
restrict_blanks)
elif "rows" in component.keys() and isinstance(component["rows"], list):
head_rows = component["rows"][:1]
tags = [row["components"][0].get("properties", dict()).get("master_tag", "") for row in head_rows[0]
if len(row["components"]) > 0][1:]
data_engine_payload.update(tag_list=tags)
data_from_kairos = self.data_engine.get_tag_values(**data_engine_payload)
if not data_from_kairos:
data_from_kairos = dict()
if not set(tags).issubset(data_from_kairos.keys()):
if not data_from_kairos and restrict_blanks:
raise RestrictBlanks
for missing_tag in tags:
data_from_kairos.setdefault(missing_tag, "-")
incremented_rows = int(component.get("numRows")) + 1
cell_list, form_props = self.generate_prop_names(tags, time_in_str, head_rows[0])
component["numRows"] = incremented_rows
component["rows"].append(cell_list)
return True, form_props, data_from_kairos
return False, dict(), dict()
def table_component(self, option, real_time, step_id, data: TriggerData, project_id, request_obj: Request,
restrict_blanks):
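# "option" names a method on this class (e.g. "add_row" or "delete_row"); dispatch happens
# via getattr below, so an unknown option raises AttributeError.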
try:
if data.tag_id:
self.common_utils.publish_data_to_kafka(tag_dict={int(time.time() * 1000): {data.tag_id: data.status}},
project_id=project_id)
func = getattr(self, option)
return func(step_id, data, real_time, project_id, request_obj, restrict_blanks)
except Exception as e:
logger.exception(e)
raise
def delete_row(self, step_id, data: TriggerData, real_time, project_id, request_obj, restrict_blanks):
try:
if not data.trigger_time:
self.trigger_step_conn.delete_many_triggers(step_id)
return
date_in_str = str(self.common_utils.get_time_by_ts(data.trigger_time / 1000, data.tz,
time_format=ui_time_format_data["yyyy-MM-dd"]))
self.trigger_step_conn.delete_one_step(step_id, date_in_str)
except Exception as e:
logger.exception(e)
raise
def add_row(self, step_id, data: TriggerData, real_time, project_id, request_obj, restrict_blanks):
try:
if not data.trigger_time:
data.trigger_time = int(time.time() * 1000)
else:
data.from_time = data.trigger_time - BackFill.trigger_step_threshold
data.to_time = data.trigger_time
time_in_str = str(
self.common_utils.get_time_by_ts(data.trigger_time / 1000, data.tz,
time_format=ui_time_format_data["HH:mm"]))
date_in_str = str(self.common_utils.get_time_by_ts(data.trigger_time / 1000, data.tz,
time_format=ui_time_format_data["yyyy-MM-dd"]))
component_json, from_trigger_step, time_list = self.get_trigger_template(step_id, date_in_str,
time_in_str)
if not component_json:
return dict()
self.counter = component_json.get("event_counter", 0) + 1
component = component_json.get("field_elements", dict()).get("components")
data_engine_payload = dict(ignore_stale=real_time,
last_data=real_time,
tz=data.tz,
request_obj=request_obj)
if data.from_time and data.to_time:
data_engine_payload.update(from_time=data.from_time, to_time=data.to_time)
row_added, form_props, data_from_kairos = self.parse_component_json(component, time_in_str,
data_engine_payload, restrict_blanks)
field_props_dot_not = {f'form_info.{k}': v for k, v in form_props.items()}
if not row_added:
return dict()
self.save_trigger_template(date_in_str, component, step_id, field_props_dot_not, time_list)
if data.manual_entry:
data_to_kafka = {data.trigger_time: data_from_kairos}
self.common_utils.publish_data_to_kafka(data_to_kafka, project_id)
return data_from_kairos
except Exception as e:
logger.exception("Failed at trigger add_row", {e})
raise
def save_trigger_template(self, t_date, component, parent_step_id, field_props, time_list):
self.trigger_step_conn.modify_component_json(parent_step_id, t_date, component, field_props, time_list,
counter=self.counter)
def get_trigger_template(self, parent_step_id, t_date, time_to_trigger):
try:
from_trigger_col = True
component_json = self.trigger_step_conn.fetch_one_step(step_id=parent_step_id, date=t_date)
if not component_json:
component_json = self.step_conn.fetch_one_step(step_id=parent_step_id)
from_trigger_col = False
time_list = component_json.get("time_triggered_for", [])
if time_to_trigger in time_list:
raise ImplementationError(f"This stage has already been triggered for {time_to_trigger}")
time_list.append(time_to_trigger)
return component_json, from_trigger_col, time_list
except Exception as e:
logger.error(e)
raise
@staticmethod
def dict_filler(key_name, tag, _time, form_props, manual_entry=False, field_type="number", entity_key=None):
key_name = key_name.replace('$', "_").replace(":", "_")
dict_formed = {
"label": "Label",
"hideLabel": True,
"tableView": True,
"key": key_name,
"properties": {},
"type": field_type,
"input": True
}
if tag:
dict_formed["properties"].update({
"tag": tag,
"time": _time,
"time_associated": "true"
})
if manual_entry:
dict_formed["properties"].update({"manual_entry": "true"})
if entity_key:
dict_formed["properties"].update({"entity_key": entity_key})
form_props.update({key_name: dict_formed["properties"]})
return dict(components=[dict_formed])
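# Illustrative sketch (hypothetical values, not part of the original source):
# dict_filler("flow$rate:1_event_3", tag="flow_rate_1", _time="10:30", form_props={})
# sanitises the key to "flow_rate_1_event_3" and returns
# {"components": [{"label": "Label", "hideLabel": True, "tableView": True,
# "key": "flow_rate_1_event_3",
# "properties": {"tag": "flow_rate_1", "time": "10:30", "time_associated": "true"},
# "type": "number", "input": True}]}
# while recording the same "properties" dict in form_props under the sanitised key.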
def generate_prop_names(self, only_tags, time_, head_rows, ):
form_props = dict()
mapped_list = list()
manual_entry_list = [row["components"][0].get("properties", dict()).get(
"master_manual_entry", "false") == "true" for row in head_rows if
len(row["components"]) > 0][1:]
entity_key_list = [row["components"][0].get("properties", dict()).get("master_entity_key", "") for row in
head_rows if len(row["components"]) > 0][1:]
field_types = [row["components"][0].get("properties", dict()).get("master_field_type", "number") for row in
head_rows if len(row["components"]) > 0][1:]
for itr, x in enumerate(only_tags):
field = self.dict_filler(key_name=f"{x}_event_{self.counter}",
tag=x,
_time=time_,
form_props=form_props,
manual_entry=manual_entry_list[itr],
field_type=field_types[itr],
entity_key=entity_key_list[itr])
mapped_list.append(field)
time_field = {"components": [{
"label": "Time",
"hideLabel": True,
"disabled": True,
"tableView": True,
"defaultValue": time_,
"key": f"event_{self.counter}",
"type": "time",
"input": True,
"inputMask": "99:99"
}]}
mapped_list.insert(0, time_field)
return mapped_list, form_props
import os
import httpx
from scripts.config.app_configurations import PathToServices
from scripts.constants.api import OeeServicesEndpoints
from scripts.constants.app_constants import CommonStatusCode
from scripts.constants.date_constants import ui_time_format_data
from scripts.core.engine.task_engine import TaskEngine
from scripts.core.schemas.forms import CustomActionsModel
from scripts.db import mongo_client, TaskInstance
from scripts.db.mongo.ilens_assistant.collections.logbook import LogbookInfo
from scripts.db.mongo.ilens_configuration.aggregations.config_aggregate import ConfigAggregate
from scripts.db.mongo.ilens_configuration.collections.customer_projects import CustomerProjects
from scripts.errors import InternalError
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
class CustomAction:
def __init__(self, custom_action: CustomActionsModel):
self.custom_model: CustomActionsModel = custom_action
self.customer_projects_con = CustomerProjects(mongo_client=mongo_client)
self.config_aggregate = ConfigAggregate()
self.task_engine = TaskEngine(project_id=self.custom_model.project_id)
self.task_inst_conn = TaskInstance(mongo_client, project_id=custom_action.project_id)
self.logbook_conn = LogbookInfo(mongo_client=mongo_client, project_id=custom_action.project_id)
self.common_utils = CommonUtils()
self.create_batch_api = f"{PathToServices.OEE_SERVICES}{OeeServicesEndpoints.api_create_batch}"
def trigger_action(self):
try:
site_templates = self.customer_projects_con.get_project_data_by_aggregate(
self.config_aggregate.get_project_template(self.custom_model.project_id))
site_templates = site_templates[0].get("data") if bool(site_templates) else []
hierarchy_id_str = ""
task_data = self.task_inst_conn.find_by_task_id(task_id=self.custom_model.task_details.task_id)
logbook_data = self.logbook_conn.find_by_id(task_data.logbook_id)
if hierarchy := self.task_engine.get_hierarchy(logbook_data.dict(), task_data.dict()):
hierarchy_id_str = self.task_engine.get_hierarchy_string(hierarchy, site_templates)
task_creation_time = task_data.meta.get("created_at")
start_property_name = os.environ.get("OEE_START_TIME_KEY", default="oee_start_time")
prod_start_time = self.common_utils.get_task_time(task_time=task_creation_time,
custom_model=self.custom_model,
task_property_name=start_property_name)
prod_start_time = self.common_utils.get_iso_format(timestamp=int(prod_start_time.timestamp()),
timezone=self.custom_model.tz,
timeformat=ui_time_format_data["yyyy-MM-dd HH:mm:ss"])
payload = dict(reference_id=task_data.reference_id,
hierarchy=hierarchy_id_str,
prod_start_time=prod_start_time,
batch_type="create",
project_id=self.custom_model.project_id,
tz=self.custom_model.tz)
with httpx.Client() as client:
resp = client.post(url=self.create_batch_api, cookies=self.custom_model.request_obj.cookies,
json=payload, timeout=15)
logger.debug(f"Resp Code:{resp.status_code}")
if resp.status_code not in CommonStatusCode.SUCCESS_CODES:
logger.error(f"Failed while calling custom API: {resp.status_code}")
# raise InternalError(f"API not callable: Status - {resp.status_code}")
if resp.headers.get('content-type', '').startswith('application/json'):
message = resp.json()
else:
message = dict(message="Unable to decode response, API Triggered")
return True, message.get("message", "Batch Created successfully")
except InternalError:
raise
except Exception as e:
logger.error(f"Exception occurred while creating batch: {e}")
import os
import time
from datetime import datetime
import httpx
import pytz
from scripts.config.app_configurations import PathToServices
from scripts.constants.api import OeeServicesEndpoints
from scripts.constants.app_constants import CommonStatusCode
from scripts.constants.date_constants import ui_time_format_data
from scripts.core.engine.task_engine import TaskEngine
from scripts.core.schemas.forms import CustomActionsModel
from scripts.db import mongo_client, TaskInstance
from scripts.db.mongo.ilens_assistant.collections.logbook import LogbookInfo
from scripts.db.mongo.ilens_configuration.aggregations.config_aggregate import ConfigAggregate
from scripts.db.mongo.ilens_configuration.collections.customer_projects import CustomerProjects
from scripts.errors import InternalError
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
class CustomAction:
def __init__(self, custom_action: CustomActionsModel):
self.custom_model: CustomActionsModel = custom_action
self.customer_projects_con = CustomerProjects(mongo_client=mongo_client)
self.config_aggregate = ConfigAggregate()
self.task_engine = TaskEngine(project_id=self.custom_model.project_id)
self.task_inst_conn = TaskInstance(mongo_client, project_id=custom_action.project_id)
self.logbook_conn = LogbookInfo(mongo_client=mongo_client, project_id=custom_action.project_id)
self.common_utils = CommonUtils()
self.create_batch_api = f"{PathToServices.OEE_SERVICES}{OeeServicesEndpoints.api_create_batch}"
def trigger_action(self):
try:
site_templates = self.customer_projects_con.get_project_data_by_aggregate(
self.config_aggregate.get_project_template(self.custom_model.project_id))
site_templates = site_templates[0].get("data") if bool(site_templates) else []
hierarchy_id_str = ""
task_data = self.task_inst_conn.find_by_task_id(task_id=self.custom_model.task_details.task_id)
logbook_data = self.logbook_conn.find_by_id(task_data.logbook_id)
if hierarchy := self.task_engine.get_hierarchy(logbook_data.dict(), task_data.dict()):
hierarchy_id_str = self.task_engine.get_hierarchy_string(hierarchy, site_templates)
task_creation_time = task_data.meta.get("created_at")
task_completion_time = task_data.meta.get("completed_at")
start_property_name = os.environ.get("OEE_START_TIME_KEY", default="oee_start_time")
end_property_name = os.environ.get("OEE_END_TIME_KEY", default="oee_end_time")
prod_start_time = self.common_utils.get_task_time(task_time=task_creation_time,
custom_model=self.custom_model,
task_property_name=start_property_name)
prod_end_time = self.common_utils.get_task_time(task_time=task_completion_time,
custom_model=self.custom_model,
task_property_name=end_property_name,
task_type="complete")
if not prod_end_time:
# Fall back to the task completion timestamp (or the current time) when no explicit end-time property exists
fallback_ts = task_completion_time / 1000 if task_completion_time else time.time()
prod_end_time = datetime.fromtimestamp(fallback_ts, tz=pytz.timezone(self.custom_model.tz))
if prod_end_time > prod_start_time:
prod_end_time = datetime.now(tz=pytz.timezone(self.custom_model.tz))
prod_end_time = self.common_utils.get_iso_format(
timestamp=int(prod_end_time.timestamp()),
timezone=self.custom_model.tz,
timeformat=ui_time_format_data["yyyy-MM-dd HH:mm:ss"])
prod_start_time = self.common_utils.get_iso_format(timestamp=int(prod_start_time.timestamp()),
timezone=self.custom_model.tz,
timeformat=ui_time_format_data["yyyy-MM-dd HH:mm:ss"])
payload = dict(reference_id=task_data.reference_id,
hierarchy=hierarchy_id_str,
project_id=self.custom_model.project_id,
tz=self.custom_model.tz,
prod_start_time=prod_start_time,
prod_end_time=prod_end_time)
with httpx.Client() as client:
resp = client.post(url=self.create_batch_api, cookies=self.custom_model.request_obj.cookies,
json=payload, timeout=15)
logger.debug(f"Resp Code:{resp.status_code}")
if resp.status_code not in CommonStatusCode.SUCCESS_CODES:
logger.error(f"Failed while calling custom API: {resp.status_code}")
# raise InternalError(f"API not callable: Status - {resp.status_code}")
if resp.headers.get('content-type', '').startswith('application/json'):
message = resp.json()
else:
message = dict(message="Unable to decode response, API Triggered")
return True, message.get("message", "Batch Created successfully")
except InternalError:
raise
except Exception as e:
logger.error(f"Exception occurred in while finishing batch: {e}")
import time
from datetime import datetime
import pytz
import requests
from scripts.config.app_configurations import PathToServices
from scripts.constants.app_constants import CommonStatusCode
from scripts.core.schemas.forms import CustomActionsModel
from scripts.db import TaskInstance
from scripts.db import mongo_client
from scripts.logging.logging import logger
from scripts.utils.stage_parser import StageParser
class CustomAction:
def __init__(self, custom_action: CustomActionsModel):
self.custom_model: CustomActionsModel = custom_action
self.task_inst_conn = TaskInstance(mongo_client, project_id=custom_action.project_id)
self.stage_parser = StageParser(project_id=custom_action.project_id)
def trigger_action(self):
try:
left_stages = self.stage_parser.get_stage_parser(self.custom_model.task_details.stages).get("left", [])
self.custom_model.task_details.meta.update({"completed_at": int(time.time() * 1000)})
task_meta = {"meta": self.custom_model.task_details.meta, "current_stage": left_stages[-1]}
self.task_inst_conn.update_instance_task(task_id=self.custom_model.task_details.task_id, data=task_meta,
upsert=False)
insert_json = {"task_completed_at": datetime.now(tz=pytz.timezone(self.custom_model.tz)).isoformat()}
request_json = {"service_type": 'update', "data": {"task_id": self.custom_model.task_details.task_id,
"project_id": self.custom_model.project_id,
"data": insert_json}}
try:
api_url = f'{PathToServices.AUDIT_PROXY}/task/tracker'
resp = requests.post(url=api_url, cookies=self.custom_model.request_obj.cookies,
json=request_json)
logger.debug(f"Resp Code:{resp.status_code}")
if resp.status_code in CommonStatusCode.SUCCESS_CODES:
response = resp.json()
logger.debug(f"Response:{response}")
except requests.exceptions.ConnectionError as e:
logger.exception(e.args)
return False, False
except Exception as e:
logger.error(f"Exception occurred in marking stage complete: {e}")
import traceback
from datetime import datetime
import pytz
from scripts.core.schemas.forms import CustomActionsModel
from scripts.db import User, mongo_client
from scripts.logging.logging import logger
from scripts.utils.mqtt_util import push_notification
class CustomAction:
def __init__(self, custom_action: CustomActionsModel):
self.custom_model: CustomActionsModel = custom_action
self.user_conn = User(mongo_client)
def trigger_action(self):
notification = dict(
type="ilens_assistant",
message=f"{self.custom_model.action.get('message', '')}",
notification_message="Notification Generated Successfully",
notification_status="success",
available_at=datetime.now().astimezone(
pytz.timezone(self.custom_model.tz)).strftime("%d-%m-%Y %I:%M%p"),
mark_as_read=False
)
try:
user_data = self.user_conn.find_user_data_with_roles(self.custom_model.action.get("user_roles"),
project_id=self.custom_model.project_id)
for each in user_data:
push_notification(notification, each.get("user_id"))
return False, False
except Exception as e:
notification.update(type="ilens_assistant",
message="Notification failed to generate",
notification_message="Failed to send notification",
notification_status="failed")
logger.error(f"Error while sending notification {e.args}")
logger.error(traceback.format_exc())
import httpx
from scripts.constants import StatusCodes
from scripts.core.schemas.custom_models import CustomRestAPIRequest
from scripts.core.schemas.forms import CustomActionsModel
from scripts.core.schemas.stages import APIAction
from scripts.errors import InternalError
from scripts.logging.logging import logger
class CustomAction:
def __init__(self, custom_action: CustomActionsModel):
self.custom_model: CustomActionsModel = custom_action
def trigger_action(self):
try:
request_object = APIAction(**self.custom_model.action)
try:
headers = {
'login-token': self.custom_model.request_obj.headers.get('login-token',
self.custom_model.request_obj.cookies.get(
'login-token')),
'projectId': self.custom_model.request_obj.cookies.get("projectId",
self.custom_model.request_obj.cookies.get(
"project_id",
self.custom_model.request_obj.headers.get(
"projectId"))),
'userId': self.custom_model.request_obj.cookies.get("userId",
self.custom_model.request_obj.cookies.get(
"user_id",
self.custom_model.request_obj.headers.get(
"userId")))}
if request_object.request_type == "POST":
date = int(self.custom_model.date) / 1000 if self.custom_model.date else 0
request_json = CustomRestAPIRequest(submitted_data=self.custom_model.submitted_data,
stage_id=self.custom_model.stage_id,
project_id=self.custom_model.project_id,
task_id=self.custom_model.task_details.task_id,
tz=self.custom_model.tz, date=date)
logger.info(f"RESTAPI POST PAYLOAD: {request_json.dict()}")
logger.info(f"Headers: {headers} , Cookies: {self.custom_model.request_obj.cookies}")
with httpx.Client() as client:
resp = client.post(url=request_object.api, cookies=self.custom_model.request_obj.cookies,
json=request_json.dict(), headers=headers, timeout=15)
elif request_object.request_type == "GET":
with httpx.Client() as client:
resp = client.get(url=request_object.api, cookies=self.custom_model.request_obj.cookies,
headers=headers,
timeout=15)
else:
raise NotImplementedError
except Exception as e:
logger.error(f"Failed in calling REST API: {e}")
raise InternalError(f"API not callable: {e}") from e
logger.debug(f'{resp.status_code},{resp.text}')
if resp.status_code not in StatusCodes.SUCCESS:
logger.error(f"Failed while calling custom API: {resp.status_code}")
raise InternalError(f"API not callable: Status - {resp.status_code}")
if resp.headers.get('content-type', '').startswith('application/json'):
message = resp.json()
else:
message = dict(message="Unable to decode response, API Triggered")
logger.info(f"Message returned in Custom API: {message}")
return True, message.get("message", "API Triggered successfully")
except InternalError:
raise
except Exception as e:
logger.error(f"Exception occurred in rest_api def: {e}")
import time
import traceback
import httpx
from scripts.config.app_configurations import OtherService, PathToServices
from scripts.constants.api import OtherEndPoints, FormEndPoints
from scripts.constants.app_constants import EmailAuth, CommonStatusCode
from scripts.core.schemas.forms import CustomActionsModel
from scripts.core.schemas.other_schemas import EmailRequest
from scripts.db import User
from scripts.db import mongo_client
from scripts.db.mongo.ilens_configuration.collections.rule_targets import RuleTargets
from scripts.logging.logging import logger
class CustomAction:
def __init__(self, custom_action: CustomActionsModel):
self.custom_model: CustomActionsModel = custom_action
self.user_conn = User(mongo_client)
self.rule_targets = RuleTargets(mongo_client)
def get_target_details(self, target_id):
try:
return template.get("data", {}).get("emailId", []) if (
template := self.rule_targets.find_one({"rule_target_id": target_id})) else []
except Exception as e:
logger.exception(f"Exception occurred while fetching target details {e}")
return False
def trigger_action(self):
try:
api_email = f"{OtherService.EMAIL_URL}{OtherEndPoints.api_send_email}"
if self.custom_model.action.get("emailSelectionType", "") == "target":
recipients, payload, mail_ids = self.custom_model.action.get("target_id", ""), [], []
else:
recipients, payload, mail_ids = self.custom_model.action.get("user_roles", []), [], []
if recipients:
if self.custom_model.action.get("emailSelectionType", "") != "target":
user_data = self.get_user_email_ids(self.custom_model, recipients)
mail_ids.extend(each.get("email", "") for each in user_data.get("users", []))
else:
mail_ids = self.get_target_details(recipients)
payload = EmailRequest(receiver_list=mail_ids, from_name="iLens Assistant Tasks",
content=self.custom_model.action.get("message", ""),
subject=self.custom_model.action.get("subject", ""))
logger.info(f"content attached in mail for {self.custom_model.task_details.task_id}")
if payload:
with httpx.Client() as client:
for _ in range(3):
resp = client.post(url=api_email, json=payload.dict(),
auth=(EmailAuth.username, EmailAuth.password))
logger.debug(f"Resp Code:{resp.status_code}")
if resp.status_code in CommonStatusCode.SUCCESS_CODES:
scheduler_response = resp.json()
logger.debug(f"Email Response:{scheduler_response}")
return True
time.sleep(3)
raise RuntimeError("Email Service Not Available")
except Exception as e:
logger.error(f"Error while sending email {e.args}")
logger.error(traceback.format_exc())
return False, False
def get_user_email_ids(self, request_data, roles):
payload = dict(project_id=request_data.project_id, user_role_list=roles, keys=["user"])
api_get_user_details = f"{PathToServices.METADATA_SERVICES}{FormEndPoints.api_get_user_details}"
with httpx.Client() as client:
for _ in range(3):
cookies = self.custom_model.request_obj.cookies
resp = client.post(url=api_get_user_details, json=payload,
cookies=cookies)
logger.debug(f"Resp Code:{resp.status_code}")
if resp.status_code in CommonStatusCode.SUCCESS_CODES:
_response = resp.json()
logger.debug(f"MetaData Response:{_response}")
return _response.get("data", {})
time.sleep(3)
import pandas as pd
from scripts.constants.app_constants import CustomKeys
from scripts.db import PeriodicData, mongo_client
from scripts.logging.logging import logger
from scripts.utils.data_processor import ProcessData
class CustomImplementations:
def __init__(self, project_id=None):
self.processor = ProcessData(project_id=project_id)
self.periodic_conn = PeriodicData(mongo_client, project_id=project_id)
def form_data_df(self,
data_for_day,
tz, current_day=False):
try:
day_df = pd.DataFrame(columns=['tag', 'time', 'values']) if not current_day else pd.DataFrame(
columns=['tag', 'time', 'values', 'default'])
for each_time in data_for_day:
val = pd.DataFrame(each_time)
val["ts"] = self.processor.convert_series_format(val["ts"], tz, "%H:%M")
val = val.reset_index().rename(columns={"index": "tag", "ts": "time"})
day_df = pd.concat([day_df, val], ignore_index=True)
return day_df
except Exception as e:
logger.error(f"Error in custom implementation in form_data_df, {e}")
def relative_day(self,
relative_data,
attribute,
form_df,
tz, ):
try:
relative_day_base = self.form_data_df(relative_data, tz)
round_relative_day = self.processor.round_off(relative_day_base, "values")
relative_day_with_attr = self.processor.merge_with_another_df(form_df, round_relative_day,
merge_on=['tag', 'time'])
# Case: No next_day attribute, Handle: empty df returned to prevent overwriting current_day df
if attribute not in relative_day_with_attr:
base_df = pd.DataFrame(columns=['tag', 'time', 'values', 'previous_day', 'next_day', 'default'])
return base_df
relative_day_df = relative_day_with_attr[relative_day_with_attr[attribute] == "true"]
return relative_day_df
except Exception as e:
logger.error(f"Error in custom implementation of relative_day, {e}")
def default_day(self,
record_in_db,
attribute,
default_value,
form_df,
tz, ):
try:
relative_day_base = self.form_data_df(record_in_db, tz)
round_relative_day = self.processor.round_off(relative_day_base, "values")
relative_day_with_attr = self.processor.merge_with_another_df(form_df, round_relative_day,
merge_on=['tag'])
if attribute not in relative_day_with_attr:
base_df = pd.DataFrame(columns=['tag', 'time', 'values', 'previous_day', 'next_day', 'default'])
return base_df
relative_day_df = relative_day_with_attr[relative_day_with_attr[attribute] == default_value]
return relative_day_df
except Exception as e:
logger.error(f"Error in custom implementation of last_value, {e}")
@staticmethod
def merge_relative(*args):
try:
merged_df = pd.concat(args).drop_duplicates(['tag', 'time', 'previous_day', 'next_day'],
keep='last')
field_props = merged_df.set_index("prop")["values"].to_dict()
return field_props
except Exception as e:
logger.error(f"Error in custom implementation of merge_relative, {e}")
def month_to_date(self, step_id, to_date, form_df):
if "calculate_mtd" in form_df:
mtd_df = form_df[form_df['calculate_mtd'] == "true"]
ref_keys = mtd_df['mtd_on_key'].to_dict()
mtd_list = self.periodic_conn.find_mtd(step_id, to_date, ref_keys)
if not mtd_list:
return {}
mtd = mtd_list[0]
mtd.pop("_id", None)
if {CustomKeys.ACTUAL_PRODUCTION_MTD, CustomKeys.CAPACITY_FOR_SHIFT_SUMMARY_MTD}.issubset(set(mtd)):
if mtd[CustomKeys.CAPACITY_FOR_SHIFT_SUMMARY_MTD] != 0:
ope_calculation = mtd[CustomKeys.ACTUAL_PRODUCTION_MTD] / mtd[
CustomKeys.CAPACITY_FOR_SHIFT_SUMMARY_MTD] * 100
mtd.update({CustomKeys.OPE_MTD: ope_calculation})
else:
mtd.update({CustomKeys.OPE_MTD: 0})
mtd.update({CustomKeys.PRODUCTION_LOSS_MTD: 100 - mtd[CustomKeys.OPE_MTD]})
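# e.g. an actual-production MTD of 80 against a capacity MTD of 100 gives OPE_MTD = 80.0
# and PRODUCTION_LOSS_MTD = 20.0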
return mtd
return {}
def time_associated(self,
form_df,
data_req,
request_data,
next_record,
prv_record,
latest_record):
try:
form_df = form_df[form_df['time_associated'] == "true"].reset_index().rename(
columns={"index": "prop"})
form_df_time = form_df.copy()
final_df = self.form_data_df(data_req, request_data.tz, current_day=True)
rounded_df = self.processor.round_off(final_df, "values")
current_day = self.processor.merge_with_another_df(form_df_time, rounded_df, merge_on=['tag', 'time'])
if "next_day" in current_day:
current_day = current_day[current_day['next_day'] != "true"]
if "previous_day" in current_day:
current_day = current_day[current_day['previous_day'] != "true"]
if "default" in current_day:
current_day = current_day[current_day['default'] != "true"]
# Add field data with attributes as custom properties
next_day_df = pd.DataFrame(columns=['tag', 'time', 'values', 'next_day'])
prev_day_df = pd.DataFrame(columns=['tag', 'time', 'values', 'previous_day'])
default_data = pd.DataFrame(columns=['tag', 'time', 'values', 'default'])
if all([len(next_record) == 0, len(prv_record) == 0, len(latest_record) == 0]):
field_props = current_day.set_index("prop")["values"].to_dict()
return field_props
if len(next_record) != 0:
next_day_df = self.relative_day(next_record, "next_day", form_df, request_data.tz)
if len(prv_record) != 0:
prev_day_df = self.relative_day(prv_record, "previous_day", form_df, request_data.tz)
if len(latest_record) != 0:
default_data = self.default_day(latest_record, "default", "last_value", form_df, request_data.tz)
field_props = self.merge_relative(default_data, current_day, next_day_df, prev_day_df)
return field_props
except Exception as e:
logger.error(f"Error in custom implementation time_associated, {e}")
def get_data_dfs(self,
form_df,
data_req,
request_data,
next_record):
try:
form_df = form_df[form_df['time_associated'] == "true"].reset_index().rename(
columns={"index": "prop"})
form_df_time = form_df.copy()
final_df = self.form_data_df(data_req, request_data.tz)
rounded_df = self.processor.round_off(final_df, "values")
current_day = self.processor.merge_with_another_df(form_df_time, rounded_df, merge_on=['tag', 'time'])
if "next_day" in current_day:
current_day = current_day[current_day['next_day'] != "true"]
# Add field data with attributes as next_day
if next_record:
next_day_df = self.relative_day(next_record, "next_day", form_df, request_data.tz)
else:
next_day_df = pd.DataFrame(columns=["time_associated", "time", "tag", "next_day", "values", "prop"])
return current_day, next_day_df
except Exception as e:
logger.error(f"Error in custom implementation time_associated, {e}")
import pandas as pd
import requests
from fastapi import Request
from scripts.config.app_configurations import PathToServices
from scripts.constants import StatusCodes
from scripts.constants.api import DataEngineEndpoints
from scripts.core.engine.custom_implementations import CustomImplementations
from scripts.core.schemas.forms import SaveForm, TasksInfoList
from scripts.db import PeriodicData, TaskInstanceData, TaskInstance
from scripts.db import mongo_client
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
from scripts.utils.data_processor import ProcessData
class DataEngine:
def __init__(self, project_id=None):
self.common_utils = CommonUtils(project_id=project_id)
self.periodic_conn = PeriodicData(mongo_client, project_id=project_id)
self.tasks_data = TaskInstanceData(mongo_client, project_id=project_id)
self.tasks = TaskInstance(mongo_client, project_id=project_id)
self.processor = ProcessData(project_id=project_id)
self.custom_imp = CustomImplementations(project_id=project_id)
def get_iot_param(self, form_info, form_data, date, tz, request_obj: Request):
try:
tag_dict = dict()
for _name, props in form_info.items():
current_properties = set(props.keys())
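# Only fields that carry a "tag" property but are not time-associated (a "manual_entry"
# flag is allowed) and are not already present in form_data are fetched from the data engine.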
if _name not in form_data and {"time", "time_associated", "tag", "manual_entry"}.intersection(
current_properties) in [{"tag"}, {"tag", "manual_entry"}]:
tag_dict.update({_name: props["tag"]})
if len(tag_dict) == 0:
return form_data
iot_values = self.get_tag_values(set(tag_dict.values()), for_date=date, tz=tz, request_obj=request_obj)
if not iot_values:
return form_data
returned_data = {
_name: round(iot_values[tag], 2) if isinstance(iot_values[tag], (int, float)) else iot_values[tag] for
_name, tag in tag_dict.items() if
tag in iot_values.keys()}
form_data.update(returned_data)
return form_data
except Exception as e:
logger.error("Failed in get_iot_param", e)
def data_for_next_day(self, date, step_id, form_df, property_name, relative_day=1):
try:
if property_name not in form_df:
return list()
next_date_in_format = self.common_utils.get_next_date(date, "yyyy-MM-dd", relative_day)
next_record = self.periodic_conn.find_by_date_and_step(next_date_in_format, step_id)
if not next_record.data:
return list()
return next_record.data
except Exception as e:
logger.error("Failed in data_for_next_day", e)
def get_last_value(self, step_id, today, form_df):
try:
if "default" not in form_df:
return []
form_df = form_df[form_df['default'] == "last_value"]
default_tags = list(form_df["tag"]) if "tag" in form_df.columns else []
last_record = self.periodic_conn.find_by_latest_data(step_id, today, default_tags)
if not last_record:
return []
return [last_record]
except Exception as e:
logger.error("Failed in get_last_value", e)
return []
def get_data_for_date(self, request_data: SaveForm, step_id, field_props, date, datetime_obj):
try:
if not field_props:
return dict()
form_df = pd.DataFrame.from_dict(field_props, orient='index')
current_record = self.periodic_conn.find_by_date_and_step(date, step_id)
prv_record = self.data_for_next_day(date, step_id, form_df, "previous_day", -1)
next_record = self.data_for_next_day(date, step_id, form_df, "next_day", 1)
latest_record = self.get_last_value(step_id, datetime_obj, form_df)
month_to_date = self.custom_imp.month_to_date(step_id, datetime_obj, form_df)
current_record.manual_data.update(month_to_date)
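# Resolution order: nothing stored anywhere -> empty dict; only manual data -> manual data;
# otherwise time-associated values from the current/previous/next/latest records are merged,
# with manual data applied on top.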
if not any([current_record.data, current_record.manual_data, prv_record, next_record, latest_record]):
return dict()
if not any([current_record.data, prv_record, next_record, latest_record]) and current_record.manual_data:
return current_record.manual_data
if all(["time_associated" not in form_df, latest_record, current_record.manual_data]):
return current_record.manual_data
if "time_associated" in form_df:
field_props = self.custom_imp.time_associated(form_df, current_record.data, request_data, next_record,
prv_record, latest_record)
field_props.update(current_record.manual_data)
return field_props
except Exception as e:
logger.error("Failed in get_data_for_date", e)
raise
def get_current_and_next_df(self, request_data: SaveForm, field_props, next_record, record):
try:
base_df = pd.DataFrame(columns=["time_associated", "time", "tag", "next_day", "values", "prop", "manual"])
form_df = pd.DataFrame.from_dict(field_props, orient='index')
if not record.data:
return base_df, base_df, form_df, dict()
data_req = record.data
if "time_associated" in form_df:
current, next_day = self.custom_imp.get_data_dfs(form_df, data_req, request_data, next_record)
return current, next_day, form_df, record.manual_data
except Exception as e:
logger.error("Failed in get_data_for_date", e)
raise
@staticmethod
def get_tag_values(tag_list, request_obj: Request,
ignore_stale=False,
for_date=None,
last_data=True,
tz="Asia/Kolkata",
from_time=None,
to_time=None):
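# Posts the tag list to the data-engine IoT-param endpoint and returns the values it
# reports; returns None when no tags are supplied or no data comes back, raises on 404
# or read timeout.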
try:
if not tag_list:
return None
cookies = request_obj.cookies
tag_list = [x for x in tag_list if x]
tag_json = dict(tag_list=list(tag_list),
tz=tz)
if for_date:
tag_json.update(filter_by_date=for_date)
elif all([from_time, to_time, not last_data]):
tag_json.update(from_time=from_time, to_time=to_time)
response = requests.post(
f"{PathToServices.DATA_ENGINE}{DataEngineEndpoints.api_iot_param}"
f"?last_data={last_data}&ignore_stale={ignore_stale}", json=tag_json,
timeout=30, cookies=cookies)
status = response.status_code
if status == 404:
raise ModuleNotFoundError
content = response.json()
if "data" not in content or not content["data"]:
return None
elif status not in StatusCodes.SUCCESS or content["status"] != "success" or not content["data"]:
logger.debug(f"Content returned: {content}")
logger.error("Error Encountered: Communication to Data engine was unsuccessful")
return None
logger.info("Communication to Data engine was successful, response content: ", content)
values = content["data"]["values"]
if isinstance(values, list):
values = values[0]
return values
except requests.exceptions.ReadTimeout:
raise TimeoutError(f"Request Time out on IOT param call")
except Exception as e:
logger.error(f"Error Encountered while contacting Data Engine, {e}")
raise
def get_tasks_from_logbooks(self, logbook_list):
try:
tasks_list = self.tasks.find_by_logbooks(logbook_list)
return [TasksInfoList(**x) for x in tasks_list]
except Exception as e:
logger.error(f"Error Encountered in get_tasks_from_logbooks, {e}")
raise
def get_data_for_task(self, task_id):
try:
stage_list = self.tasks_data.find_data_by_task_id(task_id)
return stage_list
except Exception as e:
logger.error(f"Error Encountered in get_tasks_from_logbooks, {e}")
raise
"""Engine related to form rendering service
- Page displayed to user based on his role (Save/Submit/Examination buttons displayed)
- Fetches data with form and saves data to the form
> Input (workflow_instance_id, stage_id)
"""
import json
import re
import threading
import time
from copy import deepcopy
from datetime import datetime
import pandas as pd
import requests
from dateutil.relativedelta import relativedelta
from fastapi import Request
from scripts.config.app_configurations import EnableAuditing, PathToServices, BackFill, EnableEvents
from scripts.constants import StepCategories
from scripts.constants.api import FormEndPoints
from scripts.constants.app_constants import AuditingKeys, CommonStatusCode, SubmitAction
from scripts.constants.date_constants import ui_time_format_data
from scripts.core.engine.custom_implementations import CustomImplementations
from scripts.core.engine.data_engine import DataEngine
from scripts.core.engine.periodic_entry import PeriodicEntry
from scripts.core.engine.submit_actions import SubmitActions
from scripts.core.schemas.auditing import UserDataEntryRecord
from scripts.core.schemas.forms import SaveForm
from scripts.core.schemas.stages import TriggerReferenceData
from scripts.db import mongo_client, StepCollection, TaskInstanceData, PeriodicData, TaskInstance, Trigger, User, \
FormProps, TaskCollection, TaskInstanceDataSchema, TriggerStepCollection, Workflow
from scripts.db.mongo.ilens_assistant.collections.reference_steps import ReferenceStep
from scripts.db.mongo.ilens_assistant.collections.task_info import TaskSchema
from scripts.db.mongo.ilens_configuration.collections.lookup_table import LookupTable
from scripts.db.mongo.ilens_configuration.collections.site_conf import SiteConf
from scripts.errors import RequiredFieldMissing, ImplementationError, InternalError, QuantityGreaterThanException
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
from scripts.utils.data_processor import ProcessData
from scripts.utils.formio_parser import get_field_props, get_field_props_by_keys, get_form_component_info
def background(f):
"""
A threading decorator: apply @background to a function to run it in a background thread.
"""
def backgrnd_func(*a, **kw):
threading.Thread(target=f, args=a, kwargs=kw).start()
return backgrnd_func
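# Illustrative usage sketch (hypothetical function name, not part of the original source):
#
# @background
# def send_audit(payload):
#     ...  # long-running side effect
#
# send_audit({"event": "saved"})  # returns immediately; the work continues on a new Thread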
class FormRenderingEngine:
def __init__(self, project_id=None):
self.processor = ProcessData(project_id=project_id)
self.steps_conn = StepCollection(mongo_client, project_id=project_id)
self.task_inst_data = TaskInstanceData(mongo_client, project_id=project_id)
self.task_inst = TaskInstance(mongo_client, project_id=project_id)
self.task_info = TaskCollection(mongo_client, project_id=project_id)
self.lookup_table = LookupTable(mongo_client, project_id=project_id)
self.data_engine = DataEngine(project_id=project_id)
self.user = User(mongo_client)
self.task_inst_conn = TaskInstance(mongo_client, project_id=project_id)
self.trigger_conn = Trigger(mongo_client, project_id=project_id)
self.form_props = FormProps(mongo_client, project_id=project_id)
self.common_utils = CommonUtils(project_id=project_id)
self.submit_action = SubmitActions(project_id=project_id)
self.periodic_conn = PeriodicData(mongo_client, project_id=project_id)
self.periodic_data_entry = PeriodicEntry(project_id=project_id)
self.site_conn = SiteConf(mongo_client, project_id=project_id)
self.reference_step_data_entry = ReferenceStep(mongo_client, project_id=project_id)
self.trigger_step_conn = TriggerStepCollection(mongo_client=mongo_client, project_id=project_id)
self.workflow_conn = Workflow(mongo_client, project_id=project_id)
self.formde_proxy = PathToServices.DATA_ENGINE
self.backfill_api_path = f'{self.formde_proxy}{FormEndPoints.api_backfill}'
self.custom_imp = CustomImplementations(project_id=project_id)
def get_form_based_template(self, step_id):
try:
step = self.steps_conn.fetch_one_step(step_id=step_id)
return step or {}
except Exception as e:
logger.error("Failed to fetch template", e)
raise
async def save_data_to_stage(self, request_data: SaveForm, bg_task, db, user_id, request_obj: Request):
try:
stage_data = self.task_inst_data.find_by_id(stage_id=request_data.stage_id)
# updating user meta on every form edit
form_details = self.form_props.find_by_id(step_id=stage_data.step_id)
current_data = self.task_inst_conn.find_by_task_id(task_id=request_data.task_id)
if form_details.form_info:
key = "task_creation_data"
property_details = get_field_props_by_keys(form_details.form_info, key)
if property_details:
self.stage_filter(request_data=request_data, property_details=property_details, task_instance=current_data)
current_data.meta.update(self.common_utils.get_user_meta(user_id))
if EnableEvents.enable_events:
self.common_utils.trigger_create_event(request_data.dict(), current_data.dict(), user_id, request_obj)
self.task_inst_conn.update_instance_task(task_id=request_data.task_id, data=dict(meta=current_data.meta),
upsert=False)
try:
if bool(stage_data) and bool(request_data.task_id):
is_report_type = True
if not bool(form_details.form_info):
is_report_type = False
if is_report_type:
form_props_dict = get_field_props(form_details.form_info, request_data.template_type, "true")
if bool(form_props_dict) and bool(current_data):
update_json = {stage_data.step_id: request_data.stage_id}
task_info_data = self.task_info.find_by_task_id(task_info_id=current_data.task_info_id)
task_info_data.previous_stage_details.update(update_json)
self.task_info \
.update_task(task_info_id=current_data.task_info_id,
data={"previous_stage_details": task_info_data.previous_stage_details})
self.save_data_to_reference_step(request_data, step_id=stage_data.step_id)
if self.periodic_data_entry.save_periodic_data(request_data, bg_task, request_obj):
self.check_triggers_on_save(request_data, db, request_obj=request_obj)
return True
self.check_triggers_on_save(request_data, db, request_obj=request_obj)
if request_data.submitted_data and bool(request_data.submitted_data.get("data")):
if EnableAuditing.form_non_periodic_auditing:
bg_task.add_task(self.audit_submitted_data, request_data, stage_data)
self.task_inst_data.update_stage(request_data.stage_id, request_data.submitted_data)
self.save_data_in_master_step(request_data=request_data, user_id=user_id)
except QuantityGreaterThanException:
raise
except Exception as e:
if EnableAuditing.form_non_periodic_auditing:
bg_task.add_task(self.audit_submitted_data, request_data, stage_data, error=str(e.args))
except Exception as e:
logger.error("Failed to save data to stage", e)
raise
def check_triggers_on_save(self, request_data, db, request_obj: Request):
try:
role_id = self.common_utils.get_user_roles_by_project_id(user_id=request_data.user_id,
project_id=request_data.project_id)
user_role = role_id[0]
workflow_details = self.submit_action.get_workflow_details(request_data.task_id)
if not request_data.date:
self.common_utils.convert_trigger_date_to_epoch(request_data.triggers, request_data)
message_exists, message = self.submit_action.get_trigger_data(workflow_id=workflow_details["workflow_id"],
workflow_version=workflow_details[
"workflow_version"],
request_data=request_data,
user_role=user_role,
on_click=request_data.type,
db=db,
request_obj=request_obj)
logger.debug(f"Returned from get_trigger_data: {message_exists, message}")
except Exception as e:
logger.exception(f"Exception occurred in check triggers on save definition {e.args}")
def audit_submitted_data(self, request_data: SaveForm, stage_data: TaskInstanceDataSchema, error=None):
try:
user_rec = self.user.find_user(request_data.user_id)
old_data = stage_data.step_data.get("data", {})
submitted_data = request_data.submitted_data["data"]
user_name = user_rec.get("username")
utc_now = self.common_utils.get_time_now()
ip_address = self.common_utils.get_ip_of_user()
audit_list = []
for prop, val in submitted_data.items():
old_val = old_data.get(prop, "-")
if isinstance(old_val, (dict, list)):
old_val = json.dumps(old_val)
if isinstance(val, (dict, list)):
val = json.dumps(val)
if old_val in ["", "-"] and val in ["", "-"]:
continue
audit_model = UserDataEntryRecord(type=AuditingKeys.non_periodic, user_id=request_data.user_id,
user_name=user_name, ip_address=ip_address, date_time=utc_now,
source=AuditingKeys.user, previous_value=old_val, new_value=val,
property_name=prop, task_id=request_data.task_id,
step_id=stage_data.step_id, stage_id=request_data.stage_id,
project_id=request_data.project_id)
if error:
audit_model.action_status = "failed"
audit_model.error_logs = error
audit_list.append(audit_model.dict())
self.common_utils.auditing_with_kafka(audit_list)
logger.info(f"Audited record successfully")
return True
except Exception as e:
logger.exception(f"Exception occurred while updating the audit data{e}")
return False
@staticmethod
def remove_date_prop(field_props, _data):
"""To avoid overwriting the triggerOnChange components"""
trigger_prop_dict = {x: y for x, y in field_props.items() if
"triggerOnChange" in y.keys() and y["triggerOnChange"] == "true"}
if not _data:
return _data, list(trigger_prop_dict.keys())
for each in trigger_prop_dict.keys():
_data.pop(each, None)
return _data, list(trigger_prop_dict.keys())
def check_trigger_template_and_get_elements(self, step_id, t_date, template_record):
field_props = {}
if template_record.get("step_category") == StepCategories.TRIGGER_BASED:
if temp_record := self.trigger_step_conn.fetch_one_step(step_id=step_id, date=t_date):
template_record = temp_record
field_props = temp_record.get("form_info")
template = template_record.get("field_elements", dict()).get("components", list())
return template, field_props
async def recursive_component_manipulation(self, component_list, start_date, end_date, trigger_prop_list, tz):
for each in component_list:
if isinstance(each, list):
await self.recursive_component_manipulation(each, start_date, end_date,
trigger_prop_list, tz)
elif each.get("key", "") not in trigger_prop_list:
_type = list({"columns", "rows", "components"}.intersection(set(each)))
if _type and isinstance(each[_type[0]], list):
await self.recursive_component_manipulation(each[_type[0]], start_date, end_date,
trigger_prop_list, tz)
else:
continue
elif "datePicker" in each:
each.update({"enableMinDateInput": True, "enableMaxDateInput": True})
each["datePicker"].update(minDate=start_date)
each["datePicker"].update(maxDate=end_date)
return component_list
async def component_date_restriction(self, task_data, component, trigger_prop_list, tz):
try:
start_date, end_date = "", ""
meta_data = task_data.meta
if meta_data:
start_date = meta_data.get("created_at", "")
end_date = meta_data.get("completed_at", "")
if start_date:
start_date = self.common_utils.get_time_by_ts(int(start_date) // 1000, tz, ui_time_format_data[
"MM/dd/yyyy"])
if end_date:
end_date = self.common_utils.get_time_by_ts(int(end_date) // 1000, tz, ui_time_format_data[
"MM/dd/yyyy"])
component = await self.recursive_component_manipulation(component, start_date, end_date, trigger_prop_list,
tz)
return component
except Exception as e:
logger.error(f"Error occurred in component_date_restriction {e}")
return component
async def form_template_with_data(self, request_data: SaveForm, request_obj: Request):
try:
base_dict = dict(submitted_data=dict(data=dict()),
components=dict())
stage_data = self.task_inst_data.find_by_id(request_data.stage_id)
task_data = self.task_inst_conn.find_by_task_id(task_id=request_data.task_id)
if not stage_data:
return base_dict
updated_data = stage_data.step_data.get("data", dict()) if bool(stage_data.step_data) else dict()
step_id = stage_data.step_id if bool(stage_data) else ""
template_record = self.get_form_based_template(step_id)
base_props = self.form_props.find_by_id(step_id).form_info
if not request_data.date:
request_data.date = self.common_utils.get_trigger_in_epoch(request_data.triggers,
request_data.submitted_data,
base_props)
if not request_data.date:
raise RequiredFieldMissing("Date not added in triggers for periodic template")
datetime_obj = self.common_utils.time_zone_converter(request_data.date, request_data.tz)
date = str(datetime_obj.date())
template, field_props = self.check_trigger_template_and_get_elements(step_id, date, template_record)
if not template:
return base_dict
if not field_props:
# Assign base properties if not trigger step or if trigger step has no data on this date
field_props = base_props
# Bind data saved in periodic tables
machine_data = self.data_engine.get_data_for_date(request_data, step_id, field_props, date, datetime_obj)
# if not bool(updated_data) and not machine_data: #need to check this,
if not bool(updated_data):
updated_data = self.get_previous_submitted_data(task_data=task_data, prop_data=field_props,
step_data=template_record,
step_id=step_id,
template_type=request_data.template_type,
auto_populate_key=request_data.auto_populate_key,
stage_data=stage_data,
category=template_record["step_category"],
date=date)
updated_data = {k: v for k, v in updated_data.items() if v}
if machine_data:
machine_data.update(updated_data)
updated_data = deepcopy(machine_data)
if template_record.get("step_category") == StepCategories.NON_PERIODIC:
date = None
# Bind data with IoT params
if field_props:
updated_data = self.data_engine.get_iot_param(field_props, updated_data, date, request_data.tz,
request_obj=request_obj)
updated_data, trigger_prop_list = self.remove_date_prop(field_props, updated_data)
template = await self.component_date_restriction(task_data, template, trigger_prop_list,
request_data.tz)
start_date = task_data.meta.get("created_at", "")
if start_date:
start_date = self.common_utils.get_time_by_ts(int(start_date) // 1000, request_data.tz, "%Y-%m-%d")
else:
start_date = datetime.now().date().strftime("%Y-%m-%d")
self.populate_tank_level_data(field_props=field_props, step_data=template_record,
request_data=request_data, submitted_data=updated_data,
datetime_obj=datetime_obj, start_date=start_date)
self.get_data_from_reference_step(field_props=field_props, submitted_data=updated_data,
date=date, task_id=request_data.task_id)
form_data = dict(submitted_data=dict(data=updated_data),
components=template)
return form_data
except Exception as e:
logger.error("Failed to return stage form with data", e)
raise
async def back_fill(self, request_data, request_obj: Request):
task_step_data = self.task_inst_data.find_by_id(request_data.stage_id)
if not task_step_data:
logger.error('Record Not Found')
return f"Data refresh failed "
task_step_id = task_step_data.step_id
time_str = self.common_utils.get_iso_format(request_data.date / 1000, timezone=request_data.tz,
timeformat=ui_time_format_data["dd-MM-yyyy"])
back_fill_data = {
"step_ids": [task_step_id],
"tz": request_data.tz,
"start_date": time_str,
"end_date": time_str,
"interval_in_mins": int(BackFill.interval_in_mins),
}
cookies = request_obj.cookies
headers = {
'login-token': request_obj.headers.get('login-token', request_obj.cookies.get('login-token')),
'projectId': request_obj.cookies.get("projectId", request_obj.cookies.get("project_id",
request_obj.headers.get(
"projectId"))),
'userId': request_obj.cookies.get("userId",
request_obj.cookies.get("user_id", request_obj.headers.get(
"userId")))}
resp = requests.post(url=self.backfill_api_path, json=back_fill_data, timeout=30, cookies=cookies,
headers=headers)
logger.info(f"Cookies: {cookies} Headers: {headers}")
if resp.status_code not in CommonStatusCode.SUCCESS_CODES:
logger.error('Failed response from back fill api')
return f"Data refresh failed ,got {resp.status_code}"
else:
logger.info('Back fill api successfully executed')
return "Data refreshed successfully"
async def submit_data(self, request_data: SaveForm, db, user_id, request_obj: Request, mobile: bool = False):
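        """Handle a form submit action: update the task's user meta, fire events if enabled,
        route refresh requests to back_fill, validate required fields for non-save actions,
        run the workflow trigger actions for the user's role and, for mark_complete,
        toggle the stage status."""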
try:
stage_data = self.task_inst_data.find_by_id(stage_id=request_data.stage_id)
# updating user meta on every form edit
current_data = self.task_inst_conn.find_by_task_id(task_id=request_data.task_id)
current_data.meta.update(self.common_utils.get_user_meta(user_id))
# triggering event
if EnableEvents.enable_events:
self.common_utils.trigger_create_event(request_data.dict(), current_data.dict(), user_id, request_obj)
self.task_inst_conn.update_instance_task(task_id=request_data.task_id, data=dict(meta=current_data.meta),
upsert=False)
if request_data.type == SubmitAction.refresh:
if request_data.triggers:
self.common_utils.convert_trigger_date_to_epoch(request_data.triggers, request_data)
msg = await self.back_fill(request_data, request_obj)
return msg
elif request_data.type != SubmitAction.save:
                # Trigger actions below run for every action type, including save;
                # required-field validation is only enforced for non-save actions.
                required_fields_filled = self.submit_action.check_required_fields_filled(request_data.stages)
                if not required_fields_filled:
                    return False
role_id = self.common_utils.get_user_roles_by_project_id(user_id=request_data.user_id,
project_id=request_data.project_id)
user_role = role_id[0]
workflow_details = self.submit_action.get_workflow_details(request_data.task_id)
if not workflow_details:
raise ImplementationError("Workflow for this task has been deleted")
if not request_data.date:
self.common_utils.convert_trigger_date_to_epoch(request_data.triggers, request_data)
message_exists, message = self.submit_action.get_trigger_data(
workflow_id=workflow_details["workflow_id"],
workflow_version=workflow_details["workflow_version"],
user_role=user_role,
on_click=request_data.type,
db=db, request_obj=request_obj, request_data=request_data)
logger.debug(f"Returned from get_trigger_data: {message_exists, message}")
if message_exists:
return message
elif request_data.type == "mark_complete":
task_instance_data = self.task_inst_data.find_by_id(stage_id=request_data.stage_id)
if task_instance_data:
self.task_inst_data.update_stage_data(stage_id=request_data.stage_id,
data=dict(status=not task_instance_data.status))
msg = "Form submitted successfully"
return msg
return "Form saved successfully"
except InternalError:
raise
except RequiredFieldMissing:
raise
except Exception as e:
logger.error("Failed to return stage form with data", e)
raise
def get_previous_submitted_data(self, task_data, prop_data: dict, step_id: str, step_data, template_type,
auto_populate_key: str, stage_data, category: str, date: str):
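        """Collect auto-populate values for a stage from previous submissions: cross-step data,
        values pulled from other steps, hierarchy names and lookup-based capacity values."""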
final_json = {}
try:
if bool(prop_data) and bool(task_data):
hierarchy_props_dict = get_field_props(prop_data, "hierarchy_populate", "true")
capacity_props_dict = get_field_props_by_keys(prop_data, "capacity_populate")
lookup_name = get_field_props_by_keys(prop_data, "lookup_name")
step_id_data = get_field_props_by_keys(prop_data, ["from_step", "step"])
step_key_data = get_field_props_by_keys(prop_data, ["from_key", "step_key"])
capacity_auto_props_dict = get_field_props(prop_data, "capacity_auto_populate", "true")
previous_keys = get_field_props_by_keys(prop_data, "auto_populate_value")
field_props_dict = get_field_props(prop_data, auto_populate_key, "true")
prev_stage_data = TaskInstanceDataSchema()
task_info_data = TaskSchema()
is_cross_step = get_field_props(prop_data, template_type, "true")
if is_cross_step and task_data:
task_info_data = self.task_info.find_by_task_id(task_info_id=task_data.task_info_id)
if task_info_data.previous_stage_details:
prev_stage_data = self.task_inst_data.find_by_id(
stage_id=task_info_data.previous_stage_details.get(stage_data.step_id))
if field_props_dict:
final_json.update(self.form_updated_submitted_json(
submitted_data=prev_stage_data.step_data.get("data", {}),
props_dict=field_props_dict, previous_keys=previous_keys, category=category, step_id=step_id,
date=date))
if step_id_data:
final_json.update(
self.load_step_data_to_another_step(task_info_data.dict(), step_id_data, step_key_data,
category, date))
if any([hierarchy_props_dict, capacity_auto_props_dict, capacity_props_dict]) and \
all([task_data, task_data.task_creation_data.get("hierarchy"),
task_data.task_creation_data.get("hierarchy", {}).get("site"), step_data,
step_data.get("field_elements")]):
input_components = get_form_component_info(step_data.get("field_elements"), "input_components")
site_id = task_data.task_creation_data.get("hierarchy", {}).get("site")
hierarchy_id = task_data.task_creation_data.get("hierarchy", {}).get(
task_data.task_creation_data.get("hierarchy", {}).get("hierarchyLevel"))
site_data = self.site_conn.find_site_by_site_id(site_id)
if bool(site_data) and bool(hierarchy_props_dict):
hierarchy_name = self.common_utils.get_hierarchy_name(site_data=site_data,
input_data=hierarchy_id)
for element in hierarchy_props_dict:
if element in input_components:
final_json.update({element: hierarchy_name})
lookup_dict = {}
lookup_list = {}
if capacity_auto_props_dict and lookup_name:
for k, v in lookup_name.items():
if v not in lookup_list:
lookup_data = self.get_data_from_lookup(lookup_name=v, lookup_id=hierarchy_id)
lookup_list.update({v: lookup_data})
lookup_dict.update({k: lookup_data})
else:
lookup_dict.update({k: lookup_list[v]})
for element in capacity_auto_props_dict:
if element in input_components and lookup_dict.get(element):
final_json.update({element: lookup_dict.get(element, '')})
elif capacity_props_dict and lookup_name:
for k, v in lookup_name.items():
if v not in lookup_list:
lookup_data = self.get_data_from_lookup(lookup_name=v,
lookup_id=capacity_props_dict.get(k, ''))
if lookup_data and k in input_components:
final_json.update({k: lookup_data})
except Exception as e:
logger.error("Failed to return stage form with data", e)
return final_json
def form_updated_submitted_json(self, submitted_data: dict, props_dict: dict, previous_keys: dict, category: str,
step_id: str, date: str):
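        """Map previously submitted values onto the current form's properties; for periodic and
        trigger-based steps the latest manual entry before `date` is used as the source."""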
return_json = {}
try:
if category in [StepCategories.PERIODIC, StepCategories.TRIGGER_BASED]:
periodic_data = list(self.periodic_conn.find_data_with_date(step_id=step_id,
_date_query={"$lt": date},
sort_json={"date": -1}))
if not periodic_data:
return return_json
manual_entry_data = periodic_data[0].get("manual_data")
if not manual_entry_data:
return return_json
for k, v in props_dict.items():
return_json.update({k: manual_entry_data.get(previous_keys.get(k), manual_entry_data.get(k))})
else:
if not submitted_data:
return return_json
for k, v in props_dict.items():
return_json.update({k: submitted_data.get(previous_keys.get(k), submitted_data.get(k))})
except Exception as e:
logger.exception(f"Exception occurred while auto populating previous task details {e}")
return return_json
def load_step_data_to_another_step(self, task_info_data: dict, step_id_data: dict, step_key_data: dict,
category: str, date: str):
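        """Resolve values configured via from_step / from_key (step_key) references, reading either
        the periodic data for the date or the previous stage's step data."""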
return_json = {}
stage_details = {}
try:
if category in [StepCategories.PERIODIC, StepCategories.TRIGGER_BASED]:
steps = list(set(step_id_data.values()))
periodic_data = self.periodic_conn.find_by_date_and_multi_step(step_id_list=steps, _date=date)
for each in periodic_data:
stage_details.update({each["step_id"]: each.get("manual_data", {})})
for key, step in step_id_data.items():
actual_key = step_key_data.get(key)
if actual_key:
return_json.update({key: stage_details.get(step, dict()).get(actual_key, "")})
return return_json
for k, v in step_id_data.items():
if bool(task_info_data.get("previous_stage_details", {}).get(v)):
stage_details.update({k: task_info_data.get("previous_stage_details", {}).get(v)})
if not bool(list(stage_details.values())):
return return_json
stage_data = self.task_inst_data.find_data_for_multiple_stages(stages_list=list(stage_details.values()))
stage_map_dict = {data.get("stage_id"): data for data in stage_data}
for k, v in step_key_data.items():
return_json.update(
{k: stage_map_dict.get(stage_details.get(k), {}).get("step_data", {}).get("data", {}).get(v, "")})
except Exception as e:
logger.error("Failed to return stage form with data", e)
return return_json
def get_data_from_lookup(self, lookup_name, lookup_id):
response = ""
try:
if not lookup_id:
return response
lookup_data = self.lookup_table.find_lookup_dict(lookup_name=lookup_name)
for data in lookup_data.get("lookup_data", []):
if data.get("lookupdata_id") == lookup_id:
response = data.get("lookup_value")
return response
except Exception as e:
logger.error("Failed to return stage form with data", e)
return response
def populate_tank_level_data(self, field_props: dict, step_data: dict, request_data, submitted_data: dict,
datetime_obj, start_date):
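        """Pre-fill tank-number/volume table rows from machine data of the configured parameter
        step (master_tank_tag / master_volume_tag) when those rows have not been filled yet."""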
try:
if not field_props:
return submitted_data
parameter_step_id = get_field_props_by_keys(field_props, "parameter_step_id")
parameter_step_id = list(parameter_step_id.values())[0] if parameter_step_id else None
if parameter_step_id:
date = str(self.common_utils.time_zone_converter(request_data.date, request_data.tz).date())
parameter_record = self.get_form_based_template(parameter_step_id)
_parameter_field_props = self.form_props.find_by_id(parameter_step_id).form_info
parameter_template, parameter_field_props = self.check_trigger_template_and_get_elements(
parameter_step_id, date, parameter_record)
tag_tank = get_field_props_by_keys(field_props, "master_tank_tag")
tag_volume = get_field_props_by_keys(field_props, "master_volume_tag")
tag_tank = list(tag_tank.values())[0].replace("$",
"_") if tag_tank else None
logger.debug(f"Tank Tag details --> {tag_tank}")
tag_volume = list(tag_volume.values())[0].replace("$",
"_") if tag_volume else None
logger.debug(f"Volume Tag details --> {tag_volume}")
if not parameter_template or not tag_tank or not tag_volume:
return submitted_data
if not parameter_field_props:
parameter_field_props = deepcopy(_parameter_field_props)
updated_machine_data = self.fetch_machine_data_for_tank_population(datetime_obj=datetime_obj,
request_data=request_data,
parameter_field_props=parameter_field_props,
end_date=date, start_date=start_date,
tag_tank=tag_tank,
parameter_step_id=parameter_step_id,
tag_volume=tag_volume)
updated_machine_data = {k: float(sum(v)) for k, v in updated_machine_data.items()}
input_components = [v for k, v in
get_form_component_info(step_data["field_elements"], "components").items() if
v.type == "table"]
final_keys_list = []
temp_dict = {}
if updated_machine_data:
for _item in input_components:
for index, each_row in enumerate(_item.rows):
temp_list = []
is_row_valid = True
if index == 0:
continue
for _index, _data in enumerate(each_row):
if len(each_row) == 3 and _index == 0:
continue
temp_list.append(_data.key)
if bool(submitted_data.get(_data.key)):
is_row_valid = False
if is_row_valid:
final_keys_list.append(temp_list)
tank_details = list(map(str, list(filter(lambda x: isinstance(x, int), list(submitted_data.values())))))
logger.debug(f"Tank No detail --> {tank_details}")
for k, v in updated_machine_data.items():
if not k:
continue
if final_keys_list and len(final_keys_list[0]) == 2:
# if str(k) in tank_details:
# continue
submitted_data.update({final_keys_list[0][0]: k, final_keys_list[0][-1]: v})
final_keys_list.pop(0)
logger.debug(f"Input Components list --> {temp_dict}")
except Exception as e:
logger.error("Failed to load step from parameter_step_id", e)
return submitted_data
def save_data_to_reference_step(self, request_data, step_id):
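        """Persist submitted data into the reference-step store (keyed by entity_key / entity_name)
        so other steps can later read it; trigger-based steps are saved per event."""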
try:
reference_dict = {}
step = self.steps_conn.fetch_one_step(step_id=step_id)
if not step:
raise RequiredFieldMissing("step not exists")
form_props = self.form_props.find_by_id(step_id).form_info
if not form_props:
return False
reference_data = get_field_props(form_props=form_props, search_keys="reference_step", value="true")
entity_data = get_field_props_by_keys(form_props=form_props, search_keys="entity_name")
entity_name = list(entity_data.values())[0] if bool(list(entity_data.values())) else ""
if not request_data.submitted_data or not bool(
request_data.submitted_data.get("data")) or not reference_data:
return False
if not request_data.date:
request_data.date = self.common_utils.get_trigger_in_epoch(request_data.triggers,
request_data.submitted_data,
form_props)
date = str(self.common_utils.time_zone_converter(request_data.date, request_data.tz).date())
submitted_data = request_data.submitted_data["data"]
property_dict = get_field_props_by_keys(form_props=form_props, search_keys="entity_key")
if step.get("step_category") in [StepCategories.TRIGGER_BASED]:
self.save_reference_data_for_trigger_steps(step_id=step_id, date=date, submitted_data=submitted_data,
step_data=step, entity_name=entity_name)
return True
for k, v in submitted_data.items():
reference_dict.update({property_dict.get(k, k): v})
self.reference_step_data_entry.update_data_with_date(data=reference_dict, _date=date, step_id=step_id,
step_category=step.get("step_category"),
entity_name=entity_name, task_id=request_data.task_id)
return True
except Exception as e:
logger.error("Failed to save data in reference step", e)
return False
def get_data_from_reference_step(self, field_props: dict, submitted_data: dict, date, task_id: str):
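        """Fill form fields that refer to another step's reference data (referred_step / referred_key),
        optionally using the previous day's record or filtering by task."""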
try:
previous_step_data = {}
if not field_props:
return submitted_data
step_dict = get_field_props_by_keys(field_props, "referred_step")
step_key_dict = get_field_props_by_keys(field_props, "referred_key")
date_key_dict = get_field_props(field_props, "referred_date", "false")
previous_key_date = get_field_props(field_props, "referred_previous_date", "true")
previous_keys_list = get_field_props_by_keys(field_props, "referred_previous_date")
task_based_filter = get_field_props_by_keys(field_props, "task_search_enabled")
non_periodic_search = get_field_props_by_keys(field_props, "non_periodic_search")
if not step_dict or not step_key_dict:
return submitted_data
step_list_by_date = list(set(list(step_dict.values())))
step_list_not_by_date = []
for k, _step in date_key_dict.items():
if k in step_dict:
step_list_by_date.remove(step_dict[k])
step_list_not_by_date.append(step_dict[k])
if task_based_filter:
step_data = self.reference_step_data_entry.find_by_date_and_multi_step(step_id_list=step_list_by_date,
_date=date, task_id=task_id)
# elif non_periodic_search:
# step_data = self.reference_step_data_entry.find_by_multi_step_without_date(step_id_list=step_list_by_date)
else:
step_data = self.reference_step_data_entry.find_by_date_and_multi_step(step_id_list=step_list_by_date,
_date=date)
if previous_key_date:
_date = (datetime.strptime(date, "%Y-%m-%d") - relativedelta(days=1)).strftime("%Y-%m-%d")
previous_step_data = self.reference_step_data_entry.find_by_date_and_multi_step(
step_id_list=step_list_by_date,
_date=_date)
step_data.update(self.reference_step_data_entry.fetch_data_from_query(
query={"step_id": {"$in": step_list_not_by_date}}))
default_step_id = list(step_dict.values())[0] if step_dict else None
if default_step_id:
for k, v in step_key_dict.items():
if k in previous_keys_list and step_dict[k] in previous_step_data:
value = previous_step_data[step_dict[k]].get("data", {}).get(v, "")
elif k in step_dict and step_dict[k] in step_data and k not in previous_keys_list:
value = step_data[step_dict[k]].get("data", {}).get(v, "")
else:
value = ""
submitted_data.update({k: value})
except Exception as e:
logger.error("Failed to fetch data from reference step", e)
return submitted_data
async def form_fill_with_reference_data(self, input_data: TriggerReferenceData, entity_name: str,
entity_search: str):
input_request = SaveForm(**{"type": input_data.type, "tz": input_data.tz, "project_id": input_data.project_id,
"stage_id": input_data.stage_id, "current_status": input_data.current_status,
"user_id": input_data.user_id, "triggers": input_data.triggers,
"task_id": input_data.task_id})
try:
submitted_data = {}
stage_data = self.task_inst_data.find_by_id(input_data.stage_id)
if not stage_data:
return submitted_data
form_props = self.form_props.find_by_id(stage_data.step_id).form_info
if not form_props:
return submitted_data
prop_value = input_data.property_value
if input_data.field_type.lower() in ["number", "integer", "int"]:
prop_value = int(prop_value)
query_dict = {f"data.{input_data.entity_key}": prop_value}
if entity_search.lower() != "false":
query_dict.update({"entity_name": entity_name})
row_unique_data = get_field_props(form_props, "row_unique_key", input_data.row_unique_key)
step_key_dict = get_field_props_by_keys(form_props, "referred_key")
records = self.reference_step_data_entry.find_data_from_query(query=query_dict,
sort_json={"_id": -1}, find_one=False)
reference_data = self.common_utils.get_updated_reference_data(records)
if not bool(reference_data):
return submitted_data
for _data in row_unique_data.keys():
if _data in step_key_dict and step_key_dict[_data] in reference_data["data"]:
submitted_data.update({_data: reference_data["data"][step_key_dict[_data]]})
input_request.submitted_data["data"] = submitted_data
return input_request.submitted_data
except Exception as e:
logger.error("Failed to fill the form with data with AR No", e)
return {}
def save_reference_data_for_trigger_steps(self, submitted_data: dict, step_data: dict,
step_id: str, date: str, entity_name: str):
try:
form_props = self.trigger_step_conn.fetch_one_step(step_id=step_id, date=date)
form_props = form_props if bool(form_props) else {}
property_dict = get_field_props_by_keys(form_props=form_props.get("form_info", {}),
search_keys="entity_key")
form_data = deepcopy(submitted_data)
events_list = list(
{
'_'.join(data.split('_')[-2:])
for data in list(submitted_data.keys())
}
)
for event in events_list:
temp_json = {}
for k, v in submitted_data.items():
if event not in k:
continue
temp_json.update({property_dict.get(k, k): v})
form_data.pop(k)
submitted_data = deepcopy(form_data)
self.reference_step_data_entry.update_data_for_trigger_steps(data=temp_json, _date=date,
step_id=step_id,
step_category=step_data.get(
"step_category"),
entity_name=entity_name,
event_id=event)
return True
except Exception as e:
logger.error("Failed to save data in reference step for trigger based steps", e)
return False
def fetch_machine_data_for_tank_population(self, start_date, end_date, request_data, parameter_step_id,
parameter_field_props, datetime_obj, tag_tank, tag_volume):
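        """Aggregate tank-tag / volume-tag readings from the parameter step for every day between
        start_date and end_date, returning {tank_key: [volume readings]}."""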
return_dict = {}
try:
dates = []
start_date = (datetime.strptime(start_date, "%Y-%m-%d"))
end_date = (datetime.strptime(end_date, "%Y-%m-%d"))
delta = end_date - start_date
            regex_spl = re.compile(r'[@_!#$%^&*()<>?/\|}{~:]')
for i in range(delta.days + 1):
each_day = (start_date + relativedelta(days=i)).strftime("%Y-%m-%d")
dates.append(each_day)
machine_data = self.machine_date_for_multiple_dates(request_data=request_data, step_id=parameter_step_id,
date_list=dates, field_props=parameter_field_props)
machine_data = machine_data if machine_data else {}
_reference_list = []
for key, value in machine_data.items():
keys_list = {}
if not isinstance(value, dict):
continue
for _data, v in value.items():
var = _data.split("_")
if _data.startswith(tag_tank) or _data.startswith(tag_volume):
event_key = "_".join(var[-2:])
if event_key not in keys_list:
keys_list.update({event_key: {}})
if _data.startswith(tag_tank):
keys_list[event_key].update({"keys": v})
if _data.startswith(tag_volume):
keys_list[event_key].update({"values": v})
                for k, v in keys_list.items():
                    if "keys" not in v or "values" not in v:
                        # Skip events where only one of the tank/volume tags was found
                        continue
                    if v["keys"] not in return_dict:
                        return_dict.update({v["keys"]: []})
if not v["values"] or ("-" in v["values"] and len(v["values"]) == 1):
continue
if isinstance(v["values"], str) and (
all(_chr.isalpha() for _chr in v["values"]) or regex_spl.search(v["values"])):
continue
try:
return_dict[v["keys"]].append(float(v["values"]))
except Exception as e:
logger.debug(f"Exception occurred while converting values to float {e}")
continue
except Exception as e:
logger.exception(f'Exception occurred while fetching the machine data {e}')
return return_dict
def machine_date_for_multiple_dates(self, step_id, date_list: list, field_props: dict, request_data):
periodic_dict = {}
try:
periodic_data = list(self.periodic_conn.find(query={'step_id': step_id, "date": {'$in': date_list}}))
trigger_steps = list(self.trigger_step_conn.aggregate(
pipelines=[{'$match': {'step_id': step_id, "date": {'$in': date_list}}},
{'$group': {'_id': None, 'data': {
'$push': {'k': {'$ifNull': ['$date', '']}, 'v': {'$ifNull': ['$form_info', '']}}}}},
{'$replaceRoot': {'newRoot': {'$arrayToObject': '$data'}}}]))
trigger_steps = trigger_steps[0] if trigger_steps else {}
for _data in periodic_data:
form_df = pd.DataFrame.from_dict(trigger_steps.get(_data.get('date')) or field_props, orient='index')
form_df = form_df[form_df['time_associated'] == "true"].reset_index().rename(
columns={"index": "prop"})
form_df_time = form_df.copy()
if _data.get("date") not in periodic_dict:
periodic_dict[_data.get('date')] = {}
if "time_associated" in form_df and _data.get('data'):
final_df = self.custom_imp.form_data_df(_data.get('data'), request_data.tz)
rounded_df = self.processor.round_off(final_df, "values")
current_day = self.processor.merge_with_another_df(form_df_time, rounded_df,
merge_on=['tag', 'time'])
if "next_day" not in current_day:
current_day['next_day'] = ''
if "previous_day" not in current_day:
current_day['previous_day'] = ''
if "default" not in current_day:
current_day['default'] = ''
field_props = self.custom_imp.merge_relative(current_day)
periodic_dict[_data.get('date')] = field_props
else:
periodic_dict[_data.get('date')] = _data.get('manual_entry', {}) if _data.get('manual_entry',
{}) else {}
except Exception as e:
logger.error(f"Exception occurred while fetching the data for multiple dates {e}")
return periodic_dict
def received_by(self, user_id):
return self.user.find_user(user_id).get('username', "")
@staticmethod
def last_updated_at():
return time.time() * 1000
def save_data_in_master_step(self, request_data, user_id):
try:
task_data = self.task_inst_conn.find_by_task_id(request_data.task_id)
if task_data.master_details.get('auto_save', ""):
for each in task_data.master_details.get('auto_save', ""):
default_obj = getattr(FormRenderingEngine(), each)
val = default_obj(user_id)
task_data.task_creation_data.update({each: val})
self.task_inst_conn.update_instance_task(task_data.task_id, data=task_data.dict())
counter = task_data.master_details.get("task_count", 0)
updated_dict = {}
master_task_id = task_data.master_details.get("master_task_id", "")
master_task_data = self.task_inst_conn.find_by_task_id(master_task_id)
step_list = []
if master_task_data:
prefix_wise_counter = master_task_data.master_details.get('prefix_wise_counter', {})
prefix = task_data.master_details.get('prefix_key', "")
prefix_value = prefix_wise_counter.get(prefix, "")
counter_considered = prefix_value if prefix and prefix_value else counter
if counter_considered:
for key, value in request_data.submitted_data.get('data', {}).items():
if prefix:
key_string = f"step_data.data.{prefix}_{key}_{counter_considered}"
else:
key_string = f"step_data.data.{key}_{counter_considered}"
updated_dict.update({key_string: value})
step_list = task_data.master_details.get("master_steps", [])
if step_list and master_task_id:
task_data_list = self.task_inst_data.find_data_with_task_id_step_list(master_task_id, step_list)
stages_list = []
for each in task_data_list:
stages_list.append(each.get("stage_id"))
if stages_list:
                self.task_inst_data.update_many_stages(stages_list, updated_dict)
return True
except Exception as e:
logger.error(f"Exception occurred while saving data in master step {e}")
raise
@staticmethod
def validate_material_info(form_props, request_data: SaveForm, request_obj: Request):
try:
input_json = {"project_id": request_data.project_id, "data": {}, "service_type": "subtraction"}
props_data = get_field_props_by_keys(form_props=form_props, search_keys="material_entry")
if props_data:
for key, value in props_data.items():
if value == "material_id":
if not request_data.submitted_data.get("data", {}).get(key):
logger.debug("Material data value doesn't exist")
return False, "Material data value doesn't exist"
input_json.update({value: request_data.submitted_data.get("data", {}).get(key)})
continue
if not request_data.submitted_data.get("data", {}).get(key):
continue
input_json["data"].update({value: request_data.submitted_data.get("data", {}).get(key)})
api_url = PathToServices.METADATA_SERVICES + 'ilens_config/material/update'
try:
resp = requests.post(url=api_url, cookies=request_obj.cookies,
json=input_json)
logger.debug(f"Resp Code:{resp.status_code}")
if resp.status_code in CommonStatusCode.SUCCESS_CODES:
response = resp.json()
logger.debug(f"Response:{response}")
if response.get("status") == "failed":
return False, response.get("message")
return True, response.get("message")
return False, "Connection Failure for Updating Material Info"
except requests.exceptions.ConnectionError as e:
logger.exception(e.args)
return False, "Connection Failure for Updating Material Info"
except Exception as e:
logger.exception(f'Error occurred while validating the material info {e}')
return True, "Success"
def stage_filter(self, request_data: SaveForm, property_details, task_instance):
try:
if property_details:
property_key = list(property_details.keys())
creation_data = task_instance.task_creation_data
creation_data.update(property_details)
entered_data = request_data.submitted_data.get("data")
final_dict = {}
for i in property_key:
values_saved = entered_data.get(i)
final_dict.update({i: values_saved})
self.task_inst.update_task_creation_by_task_id(task_id=request_data.task_id,
property_dict=final_dict)
except Exception as e:
logger.error(f"Exception while saving record {str(e)}")
raise
from copy import deepcopy
import numpy as np
from scripts.config.app_configurations import PathToServices, EnableAuditing
from scripts.constants import StepCategories, CustomObjects
from scripts.constants.api import CustomEndPoints
from scripts.constants.app_constants import AuditingKeys
from scripts.constants.date_constants import ui_time_format_data
from scripts.core.engine.data_engine import DataEngine
from scripts.core.handlers.custom_handler import CustomHandler
from scripts.core.schemas.auditing import UserDataEntryRecord
from scripts.core.schemas.custom_models import SaveTableRequest
from scripts.core.schemas.forms import SaveForm
from scripts.db import mongo_client, StepCollection, TaskInstanceData, PeriodicData, User, TriggerStepCollection
from scripts.db.mongo.ilens_assistant.collections.form_props import FormProps
from scripts.db.psql.databases import get_assistant_db
from scripts.errors import RequiredFieldMissing
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
from scripts.utils.data_processor import ProcessData
from scripts.utils.ilens_publish_data import KairosWriter
full_date_format = "yyyy-dd-MM HH:mm"
class PeriodicEntry:
def __init__(self, project_id=None):
self.steps_conn = StepCollection(mongo_client, project_id=project_id)
self.task_inst_data = TaskInstanceData(mongo_client, project_id=project_id)
self.form_props = FormProps(mongo_client, project_id=project_id)
self.common_utils = CommonUtils(project_id=project_id)
self.periodic_conn = PeriodicData(mongo_client, project_id=project_id)
self.kairos_writer = KairosWriter()
self.data_engine = DataEngine(project_id=project_id)
self.processor = ProcessData(project_id=project_id)
self.user_conn = User(mongo_client)
self.trigger_step = TriggerStepCollection(mongo_client, project_id=project_id)
self.api_custom_save_to_psql = f"{PathToServices.FORM_MT}{CustomEndPoints.api_save_table}"
self.custom_handler = CustomHandler()
self.ref_tag_time_manual_next = {"tag", "time", "time_associated", "manual_entry", "next_day"}
self.ref_tag_time_manual_previous = {"tag", "time", "time_associated", "manual_entry", "previous_day"}
self.ref_tag_time_next = {"tag", "time", "time_associated", "next_day"}
self.ref_tag_time_previous = {"tag", "time", "time_associated", "previous_day"}
self.ref_tag_time_manual = {"tag", "time", "time_associated", "manual_entry"}
self.ref_tag_time = {"tag", "time", "time_associated"}
self.ref_tag_manual_entry = {"tag", "manual_entry"}
self.ref_manual_entry = {"manual_entry"}
def save_periodic_data(self, request_data: SaveForm, bg_task, request_obj):
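        """Split a periodic / trigger-based form submission into tag, manual, previous-day and
        next-day buckets, publish the tag data to Kafka/Kairos, persist it in the periodic store
        and optionally audit the changes."""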
try:
stage_data = self.task_inst_data.find_by_id(request_data.stage_id)
step = self.steps_conn.fetch_one_step(step_id=stage_data.step_id)
if step["step_category"] not in [StepCategories.PERIODIC, StepCategories.TRIGGER_BASED] or \
not request_data.submitted_data or not bool(request_data.submitted_data.get("data")):
return False
submitted_data = request_data.submitted_data["data"]
form_props = self.form_props.find_by_id(stage_data.step_id).form_info
if not form_props:
raise RequiredFieldMissing("Form properties not associated to this periodic step")
if not request_data.date:
request_data.date = self.common_utils.get_trigger_in_epoch(request_data.triggers,
request_data.submitted_data,
form_props)
if not request_data.date:
raise RequiredFieldMissing("Date not added in triggers for periodic template")
datetime_obj = self.common_utils.time_zone_converter(request_data.date, request_data.tz)
date = str(datetime_obj.date())
if step["step_category"] == StepCategories.TRIGGER_BASED:
trigger_props = self.trigger_step.fetch_one_step(step_id=stage_data.step_id, date=date)
form_props = trigger_props["form_info"] if trigger_props and trigger_props.get(
"form_info") else form_props
next_date = self.common_utils.get_next_date(date, "yyyy-MM-dd", 1)
previous_date = self.common_utils.get_next_date(date, "yyyy-MM-dd", -1)
next_day_record = self.periodic_conn.find_by_date_and_step(next_date, stage_data.step_id)
prev_day_record = self.periodic_conn.find_by_date_and_step(previous_date, stage_data.step_id)
today_record = self.periodic_conn.find_by_date_and_step(date, stage_data.step_id)
tag_dict, kairos_dict, next_day_dict, only_manual, iot_param, previous_day_dict = \
self.form_tag_dicts(form_props, date, datetime_obj, request_data, submitted_data, stage_data.step_id,
step.get("replicate_type"), bg_task, request_obj)
if any([kairos_dict, iot_param]):
if set(iot_param.keys()).intersection(set(kairos_dict.keys())):
kairos_dict = {_time: (iot_param[_time] if _time in iot_param.keys() else tags) for _time, tags in
kairos_dict.items()}
else:
kairos_dict.update(iot_param)
self.common_utils.publish_data_to_kafka(kairos_dict, request_data.project_id)
try:
periodic_data = dict(data=tag_dict, manual_data=only_manual)
self.periodic_conn.save_and_update_data(_date=date, data=periodic_data, step_id=stage_data.step_id)
self.update_relative_data(existing_rec=prev_day_record.data,
relative_date=previous_date,
data_dict=previous_day_dict,
step_id=stage_data.step_id)
self.update_relative_data(existing_rec=next_day_record.data,
relative_date=next_date,
data_dict=next_day_dict,
step_id=stage_data.step_id)
if EnableAuditing.form_periodic_auditing:
bg_task.add_task(self.form_audit_model, today_record, submitted_data, form_props, date, next_date,
request_data,
stage_data.step_id,
next_day_record.data, next_day_record.manual_data)
except Exception as e:
logger.error(f"Failed to update periodic data in mongo, {e}")
if EnableAuditing.form_periodic_auditing:
bg_task.add_task(self.form_audit_model, today_record, submitted_data, form_props, date, next_date,
request_data,
stage_data.step_id,
next_day_record.data, next_day_record.manual_data, str(e.args))
return True
except Exception as e:
logger.error("Failed in save_periodic_data", e)
raise
def allow_all_manual_fields(self, tag_dict, next_day_dict, iot_param, project_id):
merge_dict = deepcopy(tag_dict)
merge_dict.update(next_day_dict)
merge_dict.update(iot_param)
self.common_utils.publish_data_to_kafka(merge_dict, project_id)
return True
@staticmethod
def add_time(next_day_time, next_day_dict, previous_day_time, previous_day_dict, kairos_dict, this_day_time,
this_day_dict):
if next_day_time not in next_day_dict:
next_day_dict[next_day_time] = {}
if previous_day_time not in previous_day_dict:
previous_day_dict[previous_day_time] = {}
if previous_day_time not in kairos_dict:
kairos_dict[previous_day_time] = {}
if this_day_time not in this_day_dict:
this_day_dict[this_day_time] = {}
if this_day_time not in kairos_dict:
kairos_dict[this_day_time] = {}
if next_day_time not in kairos_dict:
kairos_dict[next_day_time] = {}
def save_to_dicts(self, form_props, date, datetime_obj, _type, request_data: SaveForm, data, custom_service_list):
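        """Route each submitted property into the correct bucket (this/next/previous day, Kairos,
        manual-only or IoT param) based on which of the tag/time/manual_entry/next_day/previous_day
        properties are configured for it."""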
try:
this_day_dict, next_day_dict, previous_day_dict = ({} for _ in range(3))
prop_not_in_data = []
day_end = datetime_obj.replace(hour=23, minute=59, second=0).timestamp() * 1000
iot_param = {day_end: {}}
next_day_time = previous_day_time = this_day_time = ""
data, custom_service_list, kairos_dict, only_manual_dict = self.custom_handler.custom_data_list(
data, _type, request_data, form_props, custom_service_list)
for prop_key, current_props in form_props.items():
try:
if prop_key not in data.keys():
prop_not_in_data.append(prop_key)
continue
time = current_props.get('time')
if time:
this_day_time = self.common_utils.convert_str_to_ts(date, current_props.get('time'),
ui_time_format_data["yyyy-MM-dd HH:mm"],
request_data.tz)
next_day_time = self.common_utils.add_days_to_epoch(1, this_day_time, request_data.tz)
previous_day_time = self.common_utils.add_days_to_epoch(-1, this_day_time, request_data.tz)
self.add_time(next_day_time, next_day_dict, previous_day_time, previous_day_dict, kairos_dict,
this_day_time,
this_day_dict)
is_manual_data = current_props.get("manual_entry", "false") in ["true"]
is_next_day = current_props.get("next_day", "false") in ["true"]
is_previous_day = current_props.get("previous_day", "false") in ["true"]
tag = current_props.get("tag")
configured_properties = set(current_props.keys())
if all([self.ref_tag_time_manual_next.issubset(configured_properties), is_manual_data,
is_next_day]):
next_day_dict[next_day_time][tag] = data[prop_key]
kairos_dict[next_day_time][tag] = data[prop_key]
elif all([self.ref_tag_time_manual_previous.issubset(configured_properties), is_manual_data,
is_previous_day]):
previous_day_dict[previous_day_time][tag] = data[prop_key]
kairos_dict[previous_day_time][tag] = data[prop_key]
elif all([self.ref_tag_time_previous.issubset(configured_properties), is_previous_day]):
previous_day_dict[previous_day_time][tag] = data[prop_key]
elif all([self.ref_tag_time_next.issubset(configured_properties), is_next_day]):
next_day_dict[next_day_time][tag] = data[prop_key]
elif all([self.ref_tag_time_manual.issubset(configured_properties), is_manual_data]):
this_day_dict[this_day_time][tag] = data[prop_key]
kairos_dict[this_day_time][tag] = data[prop_key]
elif all([self.ref_tag_time.issubset(configured_properties)]):
this_day_dict[this_day_time][tag] = data[prop_key]
elif all([self.ref_tag_manual_entry.issubset(configured_properties), is_manual_data]):
iot_param[day_end][tag] = data[prop_key]
only_manual_dict[prop_key] = data[prop_key]
elif all([self.ref_manual_entry.issubset(configured_properties), is_manual_data]):
only_manual_dict[prop_key] = data[prop_key]
except Exception as e:
logger.error(e)
raise
return [this_day_dict, kairos_dict, next_day_dict, only_manual_dict, iot_param, previous_day_dict,
custom_service_list]
except Exception as e:
logger.error(e)
raise
def form_tag_dicts(self, form_props, date, datetime_obj, request_data: SaveForm, data, step_id, _type, bg_task,
request_obj):
custom_service_list = []
if _type in CustomObjects.custom_models_to_list:
custom_service_list.append(data)
[this_day_dict, kairos_dict, next_day_dict, only_manual_dict, iot_param, previous_day_dict,
custom_service_list] = self.save_to_dicts(
form_props,
date,
datetime_obj,
_type,
request_data,
data,
custom_service_list)
if custom_service_list:
self.custom_save(step_id, date, _type, custom_service_list, bg_task, request_data, request_obj)
return this_day_dict, kairos_dict, next_day_dict, only_manual_dict, iot_param, previous_day_dict
def update_relative_data(self, existing_rec, relative_date, data_dict, step_id):
try:
if not data_dict:
return
if not existing_rec:
self.periodic_conn.save_and_update_data(relative_date, step_id, dict(data=data_dict))
return
existing_dict = {x["ts"]: x["values"] for x in existing_rec}
new_dict = deepcopy(existing_dict)
for ts, val in data_dict.items():
if ts not in existing_dict.keys():
new_dict[ts] = val
else:
new_dict[ts] = {**existing_dict[ts], **val}
self.periodic_conn.save_and_update_data(relative_date, step_id, dict(data=new_dict))
return
except Exception as e:
logger.error("Failed in update_next_day_data", e)
raise
def form_audit_model(self, today_record, submitted_data, form_props, current_day, next_date, request_data, step_id,
next_day_record, next_manual, error=None):
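        """Build user-data-entry audit records by diffing the stored values (current and next day,
        plus manual entries) against the newly submitted data, then publish them for auditing."""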
try:
audits = list()
user_rec = self.user_conn.find_user(request_data.user_id)
user_name = user_rec.get("username")
df_list, current_manual = self.current_next_df_list(request_data, form_props, current_day,
next_date,
next_day_record, today_record)
utc_time = self.common_utils.get_time_now()
ip_address = self.common_utils.get_ip_of_user()
for each_df in df_list:
for index, row in each_df.iterrows():
if not row.get("values"):
row["values"] = "-"
row = row.replace({np.nan: None})
prop = row['prop']
old = row["values"] if row["values"] else ""
new = submitted_data.get(prop, "") if submitted_data.get(prop, "") else ""
are_equal = self.check_equality(old, new)
if are_equal or (old in ["", "-"] and new in ["", "-"]):
continue
tag_time = row["datetime"] if "datetime" in row and row["datetime"] != np.nan else None
audit_model = UserDataEntryRecord(type=AuditingKeys.periodic,
user_id=request_data.user_id,
user_name=user_name,
date_time=utc_time,
ip_address=ip_address,
source=AuditingKeys.user,
previous_value=old,
new_value=new,
property_name=prop,
tag=row.get("tag", ""),
task_id=request_data.task_id,
step_id=step_id,
stage_id=request_data.stage_id,
project_id=request_data.project_id,
)
if tag_time:
audit_model.tag_time = tag_time
if error:
audit_model.action_status = "failed"
audit_model.error_logs = error
audits.append(audit_model.dict())
if current_manual or next_manual:
for each_entry in [current_manual, next_manual]:
for key, val in each_entry.items():
old = val
new = submitted_data.get(key)
are_equal = self.check_equality(old, new)
if are_equal:
continue
audit_model = UserDataEntryRecord(type=AuditingKeys.periodic,
user_id=request_data.user_id,
user_name=user_name,
date_time=utc_time,
ip_address=ip_address,
source=AuditingKeys.user,
previous_value=old,
tag="manual_entered",
new_value=new,
property_name=key,
task_id=request_data.task_id,
step_id=step_id,
stage_id=request_data.stage_id,
project_id=request_data.project_id,
)
                        if error:
                            audit_model.action_status = "failed"
                            audit_model.error_logs = error
                        audits.append(audit_model.dict())
if not audits:
return True
self.common_utils.auditing_with_kafka(audits)
logger.info(f"Audited records successfully")
return True
except Exception as e:
logger.error(f"Failed in form_audit_model: {e}")
@staticmethod
def check_equality(old, new):
"""workaround_for_floats"""
if old == new:
return True
try:
if new:
new = float(new)
except ValueError:
pass
if all([isinstance(old, (int, float)), isinstance(new, (int, float)), old == new]):
return True
return False
def current_next_df_list(self, request_data, form_props, current_day, next_date, next_day_record,
today_record):
try:
present_df, next_df, form_df, current_manual = self.data_engine.get_current_and_next_df(request_data,
form_props,
next_day_record,
today_record)
if present_df.empty:
if "next_day" in form_df:
this_day_props = form_df[form_df["next_day"] != "true"]
else:
this_day_props = form_df.copy(deep=True)
present_df = this_day_props.rename_axis('prop').reset_index()
present_df["datetime"] = self.processor.add_timestamp_to_df(present_df, current_day, request_data.tz,
ui_time_format_data[full_date_format])
if next_df.empty and "next_day" in form_df:
next_day_props = form_df[form_df["next_day"] == "true"]
next_df = next_day_props.rename_axis('prop').reset_index()
next_df["datetime"] = self.processor.add_timestamp_to_df(next_df, next_date, request_data.tz,
ui_time_format_data[full_date_format])
elif not next_df.empty:
next_df["datetime"] = self.processor.add_timestamp_to_df(next_df, next_date, request_data.tz,
ui_time_format_data[full_date_format])
df_list = [present_df, next_df] if not next_df.empty else [present_df]
return df_list, current_manual
except Exception as e:
logger.error(f"Failed in current_next_df_list: {e}")
def custom_save(self, step_id, date, _type, data_list, bg_task, request_data: SaveForm, request_obj):
request_model = SaveTableRequest(replicate_type=_type,
data_list=data_list,
step_id=step_id,
date=date,
project_id=request_data.project_id,
tz=request_data.tz,
cookies=request_obj.cookies)
db = next(get_assistant_db())
bg_task.add_task(self.custom_handler.save_table_to_postgres, request_model, db)
return True
from copy import deepcopy
from scripts.constants.app_constants import SubmitAction
from scripts.constants.stage_constants import StageConstants
from scripts.db import mongo_client, TaskInstance, Constants, StepCollection, LogbookLinkInfo, Workflow, WorkflowSchema
from scripts.db.common_aggregates import CommonAggregates
from scripts.logging.logging import logger
class StageNavigation:
def __init__(self, project_id=None):
self.logbook_links_conn = LogbookLinkInfo(mongo_client=mongo_client, project_id=project_id)
self.tasks_conn = TaskInstance(mongo_client=mongo_client, project_id=project_id)
self.const_conn = Constants(mongo_client=mongo_client)
self.step_conn = StepCollection(mongo_client=mongo_client, project_id=project_id)
self.common_agg = CommonAggregates()
self.workflow_conn = Workflow(mongo_client=mongo_client, project_id=project_id)
def logbook_links(self, task_data, final_dict):
try:
logbook_links = self.logbook_links_conn.find_by_logbook_id(logbook_id=task_data.get('logbook_id'))
for each in logbook_links.external_links:
links_data = dict(label=each.get("display_title", ""),
link_type=each.get("link_type", ""),
linked_to=each.get("linked_to", ""),
menu_placement=each.get("menu_placement", ""),
type="external_link")
if each["menu_placement"] in final_dict:
final_dict[each["menu_placement"]].append(links_data)
else:
final_dict.update({each["menu_placement"]: [links_data]})
except Exception as e:
logger.exception(f"Error in logbook_links def: {e}")
    def get_actions(self, workflow_permissions, nav_type, steps, mobility=False):
try:
button_view = self.const_conn.find_constant(_type="button_view_with_permissions",
filter_dict={"_id": 0})
button_view_dict, properties_dict, validate_step_dict = {}, {}, {}
button_properties = {}
for data in button_view.data:
button_view_dict.update({data.get("action"): data.get("button_label")})
button_properties.update({data.get("action"): data})
step_data = self.step_conn.get_data_by_aggregate(self.common_agg.get_step_details(steps=steps))
step_data = step_data[0] if step_data else dict()
permissions = []
for permission in workflow_permissions:
if "permissions" in permission:
permissions.extend(permission.get("permissions", []))
permissions = permissions if bool(permissions) else []
permissions = list(set(permissions))
permissions_dict = list()
# Add Mark stage complete button
if nav_type == "left" and not mobility:
permissions_dict.append(StageConstants.mark_as_completed)
if SubmitAction.save in permissions:
permissions.append(SubmitAction.refresh)
permissions = sorted(list(set(permissions)))
for permission in permissions:
if permission in button_view_dict and permission != SubmitAction.view:
permissions_temp_dict = {"label": button_view_dict.get(permission),
"value": permission,
**button_properties.get(permission, {})}
permissions_temp_dict.pop("button_label", None)
permissions_temp_dict.pop("permission_label", None)
permissions_temp_dict.pop("action", None)
if permission == SubmitAction.refresh:
permissions_temp_dict.update(action='onlySubmit')
permissions_dict.append(permissions_temp_dict)
return step_data, button_view_dict, permissions_dict
except Exception as e:
logger.exception(f"Error in logbook_links def: {e}")
@staticmethod
def check_permissions(step_permissions):
if SubmitAction.save in step_permissions and SubmitAction.view in step_permissions:
step_permissions.remove(SubmitAction.view)
if SubmitAction.save not in step_permissions:
return True
return False
def get_stages(self, steps, nav_type, user_role, workflow_permissions, stages_dict, stage_status_map,
stage_status_map_mobile, workflow_id, workflow_version, mobility=False):
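        """Build the stage navigation menu for the given steps: per-step actions, disabled actions,
        read-only flag, completion status and validation flag, grouped by menu placement."""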
try:
final_dict = {}
workflow_data: WorkflowSchema = self.workflow_conn.find_by_id(workflow_id, workflow_version)
step_data, button_view_dict, permissions_dict = \
self.get_actions(workflow_permissions, nav_type, steps, mobility=mobility)
for step in steps:
menu_placement_availability = step_data.get(step, {}).get("menu_placement")
                if step not in step_data or not menu_placement_availability:
continue
if step_data[step]["menu_placement"] not in final_dict:
final_dict.update({step_data[step]["menu_placement"]: list()})
step_permissions = list()
actions_list = list()
action_values = set()
for permission in workflow_permissions:
if step == permission.get("step_id") and user_role == permission.get("user_role") and bool(
permission.get("permissions")):
step_permissions.extend(permission.get("permissions"))
for item in permission.get("permissions"):
if bool(button_view_dict.get(item)) and item != SubmitAction.view:
actions_list.append({"label": button_view_dict.get(item), "value": item})
action_values.add(item)
step_permissions = list(set(step_permissions))
if not step_permissions:
continue
if SubmitAction.save not in step_permissions and SubmitAction.view not in step_permissions:
continue
if SubmitAction.save in action_values and SubmitAction.refresh not in action_values:
action_values.add(SubmitAction.refresh)
read_only = self.check_permissions(step_permissions)
disabled_actions = list(set(button_view_dict.keys()) - action_values - {SubmitAction.view})
temp_json = {
"stage_id": stages_dict.get(step),
"value": stages_dict.get(step),
"step_id": step,
"label": step_data.get(step).get("display_title"),
"actions": actions_list,
"disabledActions": disabled_actions,
"status": stage_status_map.get(step, False),
"type": "step",
"readOnly": read_only,
"validation": workflow_data.validation.get(step, False)
}
if mobility:
temp_json.update(status=stage_status_map_mobile.get(step, False))
if stage_status_map.get(step, False) and not mobility:
temp_json.update(iconClass=StageConstants.mark_complete_icon,
iconColor=StageConstants.mark_complete_icon_color)
final_dict[step_data[step]["menu_placement"]].append(deepcopy(temp_json))
return final_dict, permissions_dict
except Exception as e:
logger.exception(f"Error in logbook_links def: {e}")
raise
from importlib import import_module
import requests
from fastapi import Request
from scripts.config.app_configurations import PathToServices
from scripts.constants.app_constants import SubmitAction, CommonStatusCode
from scripts.core.schemas.forms import CustomActionsModel
from scripts.core.schemas.forms import SaveForm
from scripts.db import TaskInstance, TaskInstanceData, StepCollection, mongo_client, TaskInstanceSchema
from scripts.db.mongo.ilens_assistant.collections.triggers import Trigger
from scripts.errors import RequiredFieldMissing, InternalError
from scripts.logging.logging import logger
from scripts.utils.formio_parser import check_required, parse_component
from scripts.utils.sql_db_utils import TicketEntry, SQLDBUtils
from scripts.utils.stage_parser import StageParser
class SubmitActions:
def __init__(self, project_id=None):
self.task_inst_conn = TaskInstance(mongo_client, project_id=project_id)
self.trigger_conn = Trigger(mongo_client, project_id=project_id)
self.task_data = TaskInstanceData(mongo_client, project_id=project_id)
self.step_collection = StepCollection(mongo_client, project_id=project_id)
self.stage_parser = StageParser(project_id=project_id)
def get_workflow_details(self, task_id):
task_details = self.task_inst_conn.find_by_task_id(task_id=task_id)
if not task_details:
return None
workflow_id = task_details.associated_workflow_id
workflow_version = task_details.associated_workflow_version
if not workflow_id or not workflow_version:
return None
return {"workflow_id": workflow_id, "workflow_version": workflow_version}
@staticmethod
def custom_user_actions(custom_action: CustomActionsModel):
try:
module_obj = import_module(f"scripts.core.engine.custom_actions.{custom_action.action['action_type']}")
class_obj = getattr(module_obj, "CustomAction")
message_exists, message = class_obj(custom_action).trigger_action()
return message_exists, message
except InternalError:
raise
except Exception as e:
logger.error(f"Exception occurred in custom_user_actions: {e}")
def get_trigger_data(self, workflow_id,
request_data: SaveForm,
workflow_version,
user_role,
db, request_obj: Request,
on_click=None):
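        """Execute the workflow trigger actions configured for this role and click type: run custom
        actions, move the task to its next state, notify the task tracker and, on save, advance the
        current stage. Returns (message_exists, message) when a custom action yields a user message."""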
try:
logger.info("Checking trigger data")
trigger_data = self.trigger_conn.fetch_by_id(workflow_id=workflow_id,
workflow_version=workflow_version,
role=user_role,
on_click=on_click)
actions = trigger_data.actions
task_details: TaskInstanceSchema = self.task_inst_conn.find_by_task_id(task_id=request_data.task_id)
stages = task_details.stages
for state in actions:
if state["action_type"] in ["rest_api", "mark_completed", "send_email", "notification", "create_batch",
"finish_batch"]:
custom_action_model = CustomActionsModel(task_details=task_details, action=state,
submitted_data=request_data.submitted_data,
stage_id=request_data.stage_id,
on_click=on_click, tz=request_data.tz,
date=request_data.date, project_id=request_data.project_id,
request_obj=request_obj)
message_exists, message = self.custom_user_actions(custom_action_model)
if message_exists:
return message_exists, message
continue
if state["from_state"] == request_data.current_status:
current_status = state["to_state"]
data = {"current_status": current_status}
self.task_inst_conn.update_instance_task(task_id=request_data.task_id, data=data, upsert=False)
insert_json = {"task_status": current_status}
request_json = {"service_type": 'update',
"data": {"task_id": request_data.task_id, "project_id": request_data.project_id,
"data": insert_json}}
try:
api_url = f'{PathToServices.AUDIT_PROXY}/task/tracker'
headers = \
{
'login-token': request_obj.headers.get('login-token',
request_obj.cookies.get('login-token')),
'projectId': request_obj.cookies.get("projectId", request_obj.cookies.get("project_id",
request_obj.headers.get(
"projectId"))),
'userId': request_obj.cookies.get("user_id",
request_obj.cookies.get("userId",
request_obj.headers.get(
"userId")))}
resp = requests.post(url=api_url, cookies=request_obj.cookies, headers=headers,
json=request_json)
logger.debug(f"Resp Code:{resp.status_code}")
if resp.status_code in CommonStatusCode.SUCCESS_CODES:
response = resp.json()
logger.debug(f"Response:{response}")
except requests.exceptions.ConnectionError as e:
logger.exception(e.args)
continue
if on_click == SubmitAction.save:
data = self.update_next_stage({}, stages, request_data.stage_id)
if data:
self.task_inst_conn.update_instance_task(task_id=request_data.task_id, data=data, upsert=False)
return False, False
except InternalError:
raise
except Exception as e:
logger.error(f"Exception occurred in get_trigger_data: {e}")
def check_required_fields_filled(self, stages_list: list):
required_field_missing = False
submitted_data_all_stages, step_mapping = self.task_data.find_many(stages_list)
stage_form_components = self.step_collection.find_many(list(step_mapping.values()))
label_missing = ""
for stage, stage_data in submitted_data_all_stages.items():
data = stage_data.get("data", {}) if stage_data else {}
if not bool(data):
continue
for field, val in data.items():
step = step_mapping[stage]
if all([step in stage_form_components.keys(), check_required(field, stage_form_components[step]),
not bool(val)]):
label_missing = parse_component(stage_form_components[step], field, "label")
required_field_missing = True
break
if required_field_missing:
raise RequiredFieldMissing(f"Required field missing: {label_missing}")
return True
def update_next_stage(self, data, stages_list, current_stage):
try:
left_stages = self.stage_parser.get_stage_parser(stages_list).get("left", [])
if left_stages[-1] == current_stage:
data.update({"current_stage": left_stages[left_stages.index(current_stage)]})
elif current_stage in left_stages:
data.update({"current_stage": left_stages[left_stages.index(current_stage) + 1]})
except Exception as e:
logger.exception(f'Exception occurred in update next stage definition {e.args}')
return data
@staticmethod
def update_task_status(status, db, task_id):
_tue_ = TicketEntry
_sbu_ = SQLDBUtils(db=db)
update_json = {_tue_.column_event_status(): status}
try:
_sbu_.update(
table=_tue_,
update_json=update_json,
filters=[{_sbu_.key_filter_expression(): 'eq',
_sbu_.key_filter_column(): _tue_.workflow_id,
_sbu_.key_filter_value(): task_id
}]
)
return True
except Exception as e:
logger.error(f"Error occurred while updating record {e}", exc_info=_sbu_.enable_traceback())
raise
from scripts.logging.logging import logger
class TaskEngine:
def __init__(self, project_id=None):
pass
@staticmethod
def get_hierarchy(logbook_data, stage_json):
try:
if logbook_data.get("hierarchy_dict"):
logbook_hierarchy = dict(hierarchyLevel=logbook_data.get("hierarchy_level", ""))
logbook_hierarchy |= logbook_data["hierarchy_dict"]
return logbook_hierarchy
elif bool(stage_json.get("task_creation_data", {})) and "hierarchy" in stage_json.get("task_creation_data",
{}):
return stage_json["task_creation_data"]["hierarchy"]
except Exception as e:
logger.exception(f"Error Occurred while fetching the hierarchy details from task ,{e}")
raise
@staticmethod
def get_hierarchy_string(hierarchy, site_templates):
try:
hierarchy_id_list = []
for data in site_templates:
if hierarchy and hierarchy.get(data):
if isinstance(hierarchy.get(data), dict):
hierarchy_id_list.append(hierarchy.get(data).get("value"))
else:
hierarchy_id_list.append(hierarchy.get(data))
return '$'.join(hierarchy_id_list)
except Exception as e:
logger.exception(f"Error Occurred while converting to hierarchy_id from task ,{e}")
raise
from scripts.core.schemas.comments import TagList
from scripts.db import mongo_client
from scripts.db.mongo.ilens_configuration.collections.site_conf import SiteConf
from scripts.logging.logging import logger
class CommentHandler:
def __init__(self, project_id=None):
self.site_conn = SiteConf(mongo_client, project_id=project_id)
def get_tags_list(self, request_data: TagList):
try:
hierarchy = request_data.hierarchy
if not hierarchy:
return list()
hierarchy_level = hierarchy.get("hierarchyLevel")
site_id = hierarchy.get("site")
hierarchy_details = self.site_conn.find_site_by_site_id(site_id=site_id, filter_dict={"_id": 0})
if not hierarchy_details:
return list()
tags = list()
if hierarchy_level == "site":
site_info = hierarchy_details.get("site_info")
tags = site_info.get("tags", [])
else:
details = hierarchy_details.get(hierarchy_level)
hierarchy_id = f"{hierarchy_level}_id"
for data in details:
if data[hierarchy_id] == hierarchy.get(hierarchy_level):
tags = data.get("tags", [])
return tags
except Exception as e:
logger.error(f"Exception while listing tags {str(e)}")
raise
from copy import copy
from importlib import import_module
from scripts.config.app_configurations import PathToServices
from scripts.constants import CustomObjects
from scripts.constants.api import FormEndPoints
from scripts.core.schemas.custom_models import SaveTableRequest
from scripts.db import mongo_client
from scripts.db.mongo.ilens_asset_model.asset_model_details import AssetDetail
from scripts.db.mongo.ilens_configuration.collections.site_conf import SiteConf
from scripts.db.psql.models.oee_production_losses import DBModelProductionLosses
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
from scripts.utils.formio_parser import get_field_props_by_keys
class CustomHandler:
def __init__(self, project_id=None):
self.table = DBModelProductionLosses
self.metadata_proxy = PathToServices.METADATA_SERVICES
self.get_asset_path = f'{self.metadata_proxy}{FormEndPoints.api_search_asset}'
self.common_utils = CommonUtils()
self.asset_model_detail = AssetDetail(mongo_client, project_id=project_id)
self.site_conf = SiteConf(mongo_client, project_id=project_id)
self.back_fill_api = f'{PathToServices.DATA_ENGINE}{FormEndPoints.api_backfill}'
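    # Flatten the asset / asset-model selections on each row and build a dict keyed by
    # "<asset hierarchy>$<only_tag>"; the caller appears to use this as the payload
    # pushed to the time-series (Kairos) store.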
def concatenate_asset_model_with_tag(self, iter_data, form_props):
tag_dict = {}
try:
for each in iter_data:
each_updated = copy(each)
if each.get('assets'):
asset_id = each.get('assets', {}).get('asset', "")
asset_model = each.get('assets', {}).get('asset_model', "")
# each_updated.pop('asset', None)
# each.pop('asset', None)
if isinstance(asset_id, str) and asset_model:
                        asset_data = self.asset_model_detail.find_asset_detail_by_id(
                            asset_id=asset_model.split('$')[0],
                            asset_version=asset_model.split('$')[1])
site_data = self.site_conf.find_site_by_site_id(asset_id.split('$')[0]) if asset_id else {}
hierarchy_dict = {"Plant" if data.split("_")[0] == "dept" else data.split("_")[
0].title(): self.common_utils.get_hierarchy_name(data, site_data) for
data in asset_id.split("$")}
each_updated.update(
{self.table.asset.key: asset_id,
self.table.asset_model.key: asset_model})
each_updated.update(asset_model_name=asset_data.get('asset_model_name', ""),
asset_name='>'.join(list(hierarchy_dict.values())))
elif isinstance(asset_id, dict) and asset_model:
each_updated.update({self.table.asset.key: asset_id.get("value", ""),
self.table.asset_model.key: asset_model.get("value", ""),
"asset_name": asset_id.get("label", ""),
"asset_model_name": asset_model.get("label", "")})
asset_id = asset_id.get("value", "")
consider_tag = get_field_props_by_keys(form_props, "only_tag")
only_tag = list(consider_tag.values())[0] if consider_tag else ""
asset_hierarchy = '$'.join([asset_id, only_tag])
tag_value = each.get(list(consider_tag.keys())[0].split('.')[-1]) if list(
consider_tag.keys()) else ""
tag_dict.update({asset_hierarchy: tag_value})
each.update(each_updated)
return tag_dict
except Exception as e:
logger.error(e)
return tag_dict
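    # Normalise custom step data: fill in missing shift values for OEE production-loss
    # rows, collect the tag and manual-entry payloads, and, for PSQL-backed model types,
    # assemble the arguments needed to replicate the data to Postgres.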
def custom_data_list(self, data, _type, request_data, form_props, custom_service_list):
kairos_dict = {}
only_manual_dict = {}
try:
for k, v in data.items():
if isinstance(v, list):
if _type == CustomObjects.oee_production_losses:
for rec in v:
if all(["shift" in rec, "from_time" in rec, "to_time" in rec]) and not rec.get("shift"):
from_time = rec["from_time"]
to_time = rec["to_time"]
rec["shift"] = self.common_utils.get_shift(request_data.project_id, from_time, to_time)
tag_dict = self.concatenate_asset_model_with_tag(v, form_props)
kairos_dict.update({request_data.date: tag_dict})
only_manual_dict.update({k: v})
if _type in CustomObjects.model_types_for_psql_tables:
custom_service_list = [v, data.get("loss_in_time"),
data.get("overall_loss_in_minutes"),
form_props.get("overall_loss_in_minutes", {}).get(
"save_to_postgres_step")]
return data, custom_service_list, kairos_dict, only_manual_dict
except Exception as e:
logger.exception(e)
return data, custom_service_list, kairos_dict, only_manual_dict
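    # Replicate submitted table data to Postgres: the query layer module is imported
    # dynamically from the request's replicate_type, and "-" placeholder values are
    # stripped from each row before insertion.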
@staticmethod
def save_table_to_postgres(request_data: SaveTableRequest, db):
try:
model_file = import_module(f"scripts.db.psql.query_layer.{request_data.replicate_type}")
db_model = getattr(model_file, "QueryLayer")
logger.info(f"ONTO postgres: {request_data.dict()}")
for itr, item in enumerate(request_data.data_list):
request_data.data_list[itr] = {x: y for x, y in item.items() if y != "-"}
db_model(db).insert_data(request_data.data_list,
request_data.step_id,
request_data.date)
logger.info(f"Saved data for table type {request_data.replicate_type}")
except TypeError:
raise
except ModuleNotFoundError:
raise
except Exception as e:
logger.error(f"Failed to save data in postgres, {e}")
import time
from fastapi import Request
from scripts.core.engine.form_renderer import FormRenderingEngine
from scripts.core.schemas.forms import SaveForm
from scripts.errors import InternalError
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
from scripts.db.mongo.ilens_assistant.collections.task_instance_data import TaskInstanceData
from scripts.db.mongo.ilens_assistant.collections.steps import StepCollection
from scripts.db.mongo.ilens_assistant.collections.form_props import FormProps
from scripts.db.mongo.ilens_assistant.collections.task_instances import TaskInstance
from scripts.db.mongo.ilens_assistant.collections.task_info import TaskCollection
from scripts.utils.formio_parser import get_field_props_by_keys
from scripts.db import mongo_client
class FormHandler:
def __init__(self, project_id=None):
self.form_engine = FormRenderingEngine(project_id)
self.comm_utils = CommonUtils(project_id=project_id)
self.task_instance_data = TaskInstanceData(mongo_client=mongo_client, project_id=project_id)
self.step_conn = StepCollection(mongo_client=mongo_client, project_id=project_id)
self.form_props = FormProps(mongo_client=mongo_client, project_id=project_id)
self.task_instance = TaskInstance(mongo_client=mongo_client, project_id=project_id)
self.task_info = TaskCollection(mongo_client=mongo_client, project_id=project_id)
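    # Single entry point for form operations: `save` persists draft stage data, `submit`
    # runs the submission flow, `mobile` does both, and the default path returns the
    # rendered form template with its data.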
async def form_renderer(self, request_data: SaveForm, user_id, request_obj: Request, save=False, submit=False,
mobile=False, db=None, bg_task=None,
):
try:
if not self.comm_utils.convert_trigger_date_to_epoch(request_data.triggers):
request_data.date = time.time() * 1000
if save:
await self.form_engine.save_data_to_stage(request_data, bg_task, db, user_id, request_obj=request_obj)
return "Form saved successfully", True
elif submit:
msg = await self.form_engine.submit_data(request_data, db, user_id, request_obj, mobile=mobile)
return msg, True
elif mobile:
await self.form_engine.save_data_to_stage(request_data, bg_task, db, user_id, request_obj=request_obj)
msg = await self.form_engine.submit_data(request_data, db, user_id, request_obj, mobile=mobile)
return msg, True
else:
form_data = await self.form_engine.form_template_with_data(request_data, request_obj=request_obj)
return "", form_data
except InternalError:
raise
except Exception as e:
logger.error(f"Exception while saving record {str(e)}")
raise
import time
from datetime import datetime
import pytz
from scripts.core.schemas.stages import SaveRemarks, FetchRemarks
from scripts.db import TaskInstanceData, mongo_client
from scripts.db.mongo.ilens_assistant.collections.project_remarks import ProjectRemarks
from scripts.db.mongo.ilens_configuration.collections.tags import Tag
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
class RemarkHandler:
def __init__(self, project_id=None):
self.stage_conn = TaskInstanceData(mongo_client=mongo_client, project_id=project_id)
self.tag_conn = Tag(mongo_client=mongo_client, project_id=project_id)
self.project_info_conn = ProjectRemarks(mongo_client=mongo_client, project_id=project_id)
self.common_utils = CommonUtils(project_id=project_id)
def save_remarks_data(self, request_data: SaveRemarks, user_id):
try:
if request_data.stage_id:
stage_record = self.stage_conn.find_by_id(stage_id=request_data.stage_id)
if bool(stage_record):
stage_remarks = stage_record.remarks
user_name = self.common_utils.get_user_name_from_id(user_id)
request_data.updated_on = int(time.time() * 1000)
request_data.user_id = user_id
request_data.user_name = user_name
if request_data.type == "parameter_specific":
tag_name = self.tag_conn.find_name_by_id(
tag_id=request_data.parameter_specific.get("parameter"))
request_data.parameter_specific["parameter_id"] = request_data.parameter_specific.get(
"parameter")
request_data.parameter_specific["parameter"] = tag_name
stage_remarks.append(request_data.dict(exclude_none=True))
self.stage_conn.update_stage_data(stage_id=request_data.stage_id, data={"remarks": stage_remarks})
elif request_data.project_id:
project_record = self.project_info_conn.find_by_id(project_id=request_data.project_id)
if not project_record:
project_record = dict()
global_remarks = project_record.get("remarks", list())
user_name = self.common_utils.get_user_name_from_id(user_id)
request_data.updated_on = int(time.time() * 1000)
request_data.user_id = user_id
request_data.user_name = user_name
if request_data.type == "parameter_specific":
tag_name = self.tag_conn.find_name_by_id(
tag_id=request_data.parameter_specific.get("parameter"))
request_data.parameter_specific["parameter_id"] = request_data.parameter_specific.get(
"parameter")
request_data.parameter_specific["parameter"] = tag_name
global_remarks.append(request_data.dict(exclude_none=True))
self.project_info_conn.update_project_info(request_data.project_id, global_remarks, upsert=True)
return True
except Exception as e:
logger.error(f"Error occurred in fetch stage stepper {e}")
raise
def fetch_remarks(self, request_data: FetchRemarks):
try:
if request_data.stage_id:
stage_record = self.stage_conn.find_by_id(stage_id=request_data.stage_id)
if not stage_record.remarks:
return dict()
for each in stage_record.remarks:
time_convert = datetime.fromtimestamp(each["updated_on"] / 1000,
tz=pytz.timezone(request_data.timezone
)). \
strftime("%d %b %y, %H:%M")
each["updated_on"] = time_convert
if each.get("type") == "parameter_specific" and bool(
each.get("parameter_specific", {}).get("observed_time")):
time_convert = datetime.fromtimestamp(
each.get("parameter_specific", {}).get("observed_time") / 1000,
tz=pytz.timezone(request_data.timezone
)). \
strftime("%d %b %y, %H:%M")
each["parameter_specific"]["observed_time"] = time_convert
response = dict(comments=stage_record.remarks)
return response
elif request_data.project_id:
global_remarks = self.project_info_conn.find_by_id(project_id=request_data.project_id)
if not global_remarks:
return dict()
for each in global_remarks["remarks"]:
time_convert = datetime.fromtimestamp(each["updated_on"] / 1000,
tz=pytz.timezone(request_data.timezone
)). \
strftime("%d %b %y, %H:%M")
each["updated_on"] = time_convert
if each.get("type") == "parameter_specific" and bool(
each.get("parameter_specific", {}).get("observed_time")):
time_convert = datetime.fromtimestamp(
each.get("parameter_specific", {}).get("observed_time") / 1000,
tz=pytz.timezone(request_data.timezone
)). \
strftime("%d %b %y, %H:%M")
each["parameter_specific"]["observed_time"] = time_convert
response = dict(comments=global_remarks["remarks"])
return response
except Exception as e:
logger.error(f"Error occurred in fetch stage stepper {e}")
raise
import traceback
from datetime import datetime
import pytz
from fastapi import Request
from scripts.config.app_configurations import DatabaseConstants, EnableAuditing
from scripts.constants import StepCategories, FactorsInTriggerCompletion
from scripts.constants.app_constants import AuditingKeys
from scripts.core.engine.component_manipulation import ComponentManipulation
from scripts.core.engine.stage_navigation import StageNavigation
from scripts.core.handlers.form_handler import FormHandler
from scripts.core.schemas.auditing import UserDataEntryRecord
from scripts.core.schemas.stages import StagesList, GetKDataRequest, TriggerData, MarkTaskCompleteRequest, \
CopyPropertyValues
from scripts.db import mongo_client, PeriodicData, TaskInstanceData, TaskInstance, \
StepCollection, Workflow, Constants, \
LogbookLinkInfo, User
from scripts.db.common_aggregates import CommonAggregates
from scripts.db.mongo.ilens_assistant.collections.form_props import FormProps
from scripts.db.mongo.ilens_assistant.collections.logbook import LogbookInfo
from scripts.db.mongo.ilens_assistant.collections.workflow_permissions import Permissions
from scripts.errors import StepsNotConfigured, ImplementationError
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
from scripts.utils.formio_parser import get_field_props
from scripts.utils.mongo_util import MongoCollectionBaseClass
from scripts.utils.mqtt_util import push_notification
from scripts.utils.stage_parser import StageParser
class StageHandler:
def __init__(self, project_id=None):
self.stage_conn = TaskInstanceData(mongo_client=mongo_client, project_id=project_id)
self.logbook_conn = LogbookInfo(mongo_client=mongo_client, project_id=project_id)
self.workflow_conn = Workflow(mongo_client=mongo_client, project_id=project_id)
self.tasks_conn = TaskInstance(mongo_client=mongo_client, project_id=project_id)
self.step_conn = StepCollection(mongo_client=mongo_client, project_id=project_id)
self.const_conn = Constants(mongo_client=mongo_client)
self.workflow_permissions = Permissions(mongo_client=mongo_client, project_id=project_id)
self.periodic_conn = PeriodicData(mongo_client, project_id=project_id)
self.form_props = FormProps(mongo_client, project_id=project_id)
self.common_utils = CommonUtils(project_id=project_id)
self.periodic_job_fails = MongoCollectionBaseClass(mongo_client, "ilens_ebpr", "periodic_job_fails")
self.periodic_job_fails.project_id = project_id
self.trigger_col = MongoCollectionBaseClass(mongo_client, DatabaseConstants.ilens_assistant_db, "triggers")
self.trigger_col.project_id = project_id
self.common_agg = CommonAggregates()
self.manipulate_comp = ComponentManipulation(project_id=project_id)
self.stage_nav_engine = StageNavigation(project_id=project_id)
self.manipulate_comp = ComponentManipulation(project_id=project_id)
self.logbook_links = LogbookLinkInfo(mongo_client=mongo_client, project_id=project_id)
self.user = User(mongo_client=mongo_client)
self.stage_parser = StageParser(project_id=project_id)
self.form_handler = FormHandler(project_id)
def get_stages_list(self, request_data: StagesList, user_id, nav_type):
try:
user_role = self.common_utils.get_user_roles_by_project_id(user_id=user_id,
project_id=request_data.project_id)
user_role = user_role[0] if user_role else ""
task_data = self.tasks_conn.find_by_task_id(task_id=request_data.task_id)
task_data = task_data.dict() if task_data else dict()
task_status = task_data.get("task_meta_details", {}).get("task_status")
task_status = "resumed" if not task_status else task_status
stages = task_data.get("stages")
stages_dict = dict()
steps = list()
if not bool(stages):
                raise StepsNotConfigured("Stages are not configured for the selected task")
stage_data = self.stage_conn.find_data_for_multiple_stages(stages_list=stages)
stage_status_map, stage_status_map_mobile = {}, {}
for stage in stage_data:
stage_id = stage["stage_id"]
stages_dict.update({stage.get("step_id"): stage_id})
steps.append(stage.get("step_id"))
status_value = stage.get("status", False)
stage_status_map.update({stage.get("step_id"): status_value})
stage_status_map_mobile.update({stage.get("step_id"): bool(stage.get("step_data"))})
workflow_permissions = self.workflow_permissions.get_data_by_aggregate(
self.common_agg.get_workflows_permissions(workflow_id=request_data.workflow_id,
workflow_version=request_data.workflow_version,
workflow_status=request_data.task_status,
user_role=user_role))
final_dict, permissions_dict = self.stage_nav_engine.get_stages(steps, nav_type, user_role,
workflow_permissions, stages_dict,
stage_status_map,
stage_status_map_mobile,
request_data.workflow_id,
request_data.workflow_version,
mobility=request_data.mobile)
if all([nav_type == "left", not final_dict.get("left_navigation")]):
raise PermissionError(f"""1. Access permissions not granted to this task. <br/>
2. Left navigation stages not added. <br/>
3. Current task status does not have configured permissions""")
self.stage_nav_engine.logbook_links(task_data, final_dict)
if nav_type:
return_json = dict(stages=final_dict.get(f"{nav_type}_navigation", []), actions=permissions_dict,
task_status=task_status)
return return_json
return_json = dict(stages=final_dict, actions=permissions_dict, task_status=task_status)
return return_json
except Exception as e:
logger.error(e.args)
raise
@staticmethod
def get_stage_status(task_data):
stages_status_map = {}
end = False
for each in task_data.get("stages"):
if task_data.get("current_stage") == each and task_data.get("current_stage") != "completed":
end = True
if not end:
stages_status_map.update({each: True})
else:
stages_status_map.update({each: False})
return stages_status_map
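    # Persist a periodic data point against the step for the derived calendar date and,
    # when periodic auditing is enabled, record an audit entry for each tag value.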
def add_periodic_stage_data(self, request_data: GetKDataRequest, trigger_method=AuditingKeys.data_published):
try:
date = str(self.common_utils.get_time_by_ts(request_data.ts / 1000, request_data.tz).date())
data_pushed = dict(ts=request_data.ts, values=request_data.values)
self.periodic_conn.save_and_update_periodic_data(date, request_data.step_id, data_pushed)
if EnableAuditing.periodic_entry_auditing:
self.audit_periodic_data(request_data.values, request_data.step_id, request_data.ts,
source=trigger_method)
return True
except Exception as e:
if EnableAuditing.periodic_entry_auditing:
self.audit_periodic_data(request_data.values, request_data.step_id, request_data.ts, e, trigger_method)
logger.error(f"Exception while saving record {str(e)}")
def get_tags(self, step_id, missing_tag):
try:
tag_list = list()
if not missing_tag:
step = self.form_props.find_by_id(step_id)
if not bool(step.form_info):
return list()
form_props_dict = get_field_props(step.form_info, "time_associated", "true")
for element, prop in form_props_dict.items():
if "manual_entry" in prop.keys() and prop["manual_entry"] == "true":
continue
for key, val in prop.items():
if key == "tag":
tag_list.append(val)
return list(set(tag_list))
else:
record_in_job_fail = self.periodic_job_fails.find_one({"metadata.step_id": step_id})
if record_in_job_fail:
missing_tags = record_in_job_fail.get("missed_tags", list())
return missing_tags
return list()
except Exception as e:
logger.error(f"Exception in get_tags {str(e)}")
raise
def get_time_list(self, step_id):
try:
time_list = list()
step = self.form_props.find_by_id(step_id)
if not bool(step.form_info):
return list()
form_props_dict = get_field_props(step.form_info, "time_associated", "true")
for element, prop in form_props_dict.items():
for key, val in prop.items():
if key == "time":
time_list.append(val)
return list(set(time_list))
except Exception as e:
logger.error(f"Exception in get_time_list {str(e)}")
raise
async def get_tag_and_time_list(self, step_id):
try:
tag_times = dict()
step = self.form_props.find_by_id(step_id)
if not bool(step.form_info):
return {}
form_props_dict = get_field_props(step.form_info, "time_associated", "true")
for element, prop in form_props_dict.items():
if "manual_entry" in prop.keys() and prop["manual_entry"] in ["true", "True"]:
continue
if prop["time"] not in tag_times:
tag_times[prop["time"]] = []
if "tag" in prop and "time" in prop and prop["tag"] not in tag_times[prop["time"]]:
tag_times[prop["time"]].append(prop["tag"])
return tag_times
except Exception as e:
logger.error(f"Exception in get_tags {str(e)}")
raise
async def audit_triggered_data(self, trigger_method, step_id, error=None):
utc_time = self.common_utils.get_time_now()
ip_address = self.common_utils.get_ip_of_user()
try:
audit_model = UserDataEntryRecord(type=AuditingKeys.periodic,
user_id="machine-triggered",
user_name="machine-triggered",
date_time=utc_time,
ip_address=ip_address,
source=trigger_method,
step_id=step_id,
new_value="failed_to_trigger"
)
            if error:
                audit_model.error_logs = str(error)
                audit_model.action_status = "failed"
self.common_utils.auditing_with_kafka([audit_model.dict()])
except Exception as e:
logger.error(f"Error while auditing in add_triggered_data {str(e)}")
def audit_periodic_data(self, values, step_id, ts, error=None, source=None):
utc_time = self.common_utils.get_time_now()
ip_address = self.common_utils.get_ip_of_user()
try:
tag_audits = []
for k, v in values.items():
audit_model = UserDataEntryRecord(type=AuditingKeys.periodic,
user_id=AuditingKeys.data_published,
user_name=AuditingKeys.data_published,
date_time=utc_time,
ip_address=ip_address,
source=source,
new_value=v,
tag=k,
step_id=step_id,
tag_time=ts)
                # Set the failure details before serialising, otherwise they would never
                # reach the audit payload appended to tag_audits.
                if error:
                    audit_model.action_status = "failed"
                    audit_model.error_logs = str(error)
                tag_audits.append(audit_model.dict())
if not tag_audits:
return True
self.common_utils.auditing_with_kafka(tag_audits)
logger.info(f"Audited record successfully")
return True
except Exception as e:
logger.error(f"Failed in audit_periodic_data for tags: {e}")
async def add_triggered_data(self, option, request_data: TriggerData, real_time, restrict_blanks, project_id,
request_obj: Request):
try:
logger.info(f"Triggered with step_id: {request_data.step_id}")
data = self.manipulate_comp.table_component(option=option, real_time=real_time,
step_id=request_data.step_id, project_id=project_id,
data=request_data, request_obj=request_obj,
restrict_blanks=restrict_blanks)
if not data:
return True
logger.info(f"Storing data to periodic data col: {data}")
data_to_mongo = GetKDataRequest(values=data,
ts=request_data.trigger_time,
tz=request_data.tz,
step_id=request_data.step_id)
if EnableAuditing.form_non_periodic_auditing:
self.add_periodic_stage_data(data_to_mongo, request_data.trigger_method)
except ImplementationError:
raise
except Exception as e:
if EnableAuditing.form_non_periodic_auditing:
await self.audit_triggered_data(request_data.trigger_method, request_data.step_id, e)
logger.error(f"Exception in add_triggered_data {str(e)}")
raise
async def get_periodic_steps(self, skip, limit):
try:
periodic_steps = self.step_conn.find({"step_category": StepCategories.PERIODIC},
{"step_id": 1, "_id": 0},
skip=skip, limit=limit)
return [x["step_id"] for x in periodic_steps]
except Exception as e:
logger.error(f"Exception in get_periodic_steps {str(e)}")
raise
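    # Auto-complete tasks: for every system-initiated trigger configured with an
    # auto_complete action at the given factor, move the matching workflow tasks from
    # the action's from_state to its to_state.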
async def trigger_task_completion(self, factor):
try:
if factor not in FactorsInTriggerCompletion.CONSOLIDATED:
raise NotImplementedError
all_triggers = self.trigger_col.find(
query={"actions.trigger_at": factor, "actions.action_type": "auto_complete",
"trigger_type": "system_init"})
for trigger in all_triggers:
workflow_id = trigger.get("workflow_id")
workflow_version = trigger.get('workflow_version')
for action in trigger.get("actions", []):
from_state = action.get("from_state")
to_state = action.get("to_state")
if not all([workflow_id, workflow_version, from_state, to_state]):
raise KeyError
self.tasks_conn.update_by_workflow_id(workflow_id=workflow_id,
workflow_version=workflow_version,
from_state=from_state,
to_state=to_state)
return True
except Exception as e:
logger.error(f"Exception in trigger_task_completion {str(e)}")
raise
async def mark_task_complete(self, request_data: MarkTaskCompleteRequest):
try:
self.tasks_conn.update_by_task_id(task_id=request_data.task_id,
from_state=request_data.from_status,
to_state=request_data.to_status)
if request_data.update_status:
data = self.stage_conn.find_all_data_by_task_id(request_data.task_id)
for update_data in data:
if "step_data" in update_data:
update_data["step_data"]["data"][request_data.update_key] = request_data.update_value
self.stage_conn.update_stage_data(update_data["stage_id"], update_data)
return True
except Exception as e:
logger.error(f"Exception in mark_task_complete {str(e)}")
raise
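    # Push an in-app notification about the task's master step to every user holding
    # one of the roles configured on the workflow's notification action.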
async def send_notification_for_roles(self, request_data):
try:
task_data = self.tasks_conn.find_by_task_id(request_data.task_id)
trigger_data = self.trigger_col.find_one({"workflow_id": task_data.associated_workflow_id,
"workflow_version": task_data.associated_workflow_version})
user_roles, step_name = [], ""
if task_data.master_details.get('master_steps'):
step_id = task_data.master_details.get('master_steps')[0]
step_name = self.step_conn.fetch_step_name(step_id=step_id)
master_task_data = self.tasks_conn.find_by_task_id(task_data.master_details.get('master_task_id', ""))
logbook_name = self.logbook_conn.find_by_id(logbook_id=master_task_data.logbook_id).logbook_name
if trigger_data.get("actions", []):
for each in trigger_data.get("actions", []):
if each.get("action_type", "") == "notification":
user_roles = each.get("user_roles", [])
break
notification = dict(
type="ilens_assistant",
message=f"{request_data.message} Step: {step_name}, Logbook : {logbook_name}",
notification_message="notification Generated Successfully",
notification_status="success",
available_at=datetime.now().astimezone(
pytz.timezone(request_data.tz)).strftime("%d-%m-%Y %I:%M%p"),
mark_as_read=False
)
try:
user_data = self.user.find_user_data_with_roles(user_roles, project_id=request_data.project_id)
for each in user_data:
push_notification(notification, each.get("user_id"))
return True
except Exception as e:
notification.update(type="ilens_assistant",
message="notification failed to generate",
notification_message="Failed to send notification",
notification_status="failed")
logger.error(f"Error while sending notification {e.args}")
logger.error(traceback.format_exc())
except Exception as e:
logger.error(f"Exception in sending notification {str(e)}")
raise
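    # Copy property values from one step to another: non-periodic steps get the values
    # merged into the submitted stage data, periodic steps get them merged into the
    # manual data stored for the given (or current) date.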
async def copy_property_values(self, request_data: CopyPropertyValues, user_id, bg_task, request_obj, db):
try:
# task_data = self.tasks_conn.find_by_task_id(task_id=request_data.task_id)
step_data = self.step_conn.fetch_one_step(step_id=request_data.to_step_id)
if step_data.get("step_category", "") == StepCategories.NON_PERIODIC:
request_data.submitted_data.get("data", {}).update(request_data.property_dict)
self.stage_conn.update_by_task_step_id(request_data.task_id, request_data.to_step_id,
request_data.submitted_data)
elif step_data.get("step_category", "") == StepCategories.PERIODIC:
if not request_data.periodic_date:
request_data.periodic_date = datetime.now().strftime("%Y-%m-%d")
periodic_data = self.periodic_conn.find_by_date_and_step(_date=request_data.periodic_date,
step_id=request_data.to_step_id)
manual_data = periodic_data.manual_data
manual_data.update(request_data.property_dict)
self.periodic_conn.update_data_with_date_periodic(_date=request_data.periodic_date,
step_id=request_data.to_step_id,
manual_data={"manual_data": manual_data})
return True
except Exception as e:
logger.error(f"Exception while copying data from one step to another step{str(e)}")
import os
import re
from copy import deepcopy
import openpyxl
import pandas as pd
import requests
from fastapi import Request
from scripts.config.app_configurations import PathToStorage, PathToServices
from scripts.constants.api import StageDataEndPoints
from scripts.constants.app_constants import CommonStatusCode
from scripts.constants.date_constants import date_time_with_hour
from scripts.constants.template_constants import TemplateStorage
from scripts.core.schemas.stages_data import TemplateListRequest, TemplateListResponse, \
FetchTemplate, TemplateTableOptions, TemplateKeyValuePairs, UploadedFileList, UploadedFileListResponse, \
DeleteDataFile, DeleteTemplate
from scripts.db import mongo_client, User
from scripts.db.mongo.ilens_assistant.aggregations.logbook import LogbookInfoAggregate
from scripts.db.mongo.ilens_assistant.aggregations.step_data_files import StepDataFilesAggregate
from scripts.db.mongo.ilens_assistant.aggregations.step_templates import TemplateInfoAggregate
from scripts.db.mongo.ilens_assistant.collections.logbook import LogbookInfo
from scripts.db.mongo.ilens_assistant.collections.step_data_files import StepDataFiles
from scripts.db.mongo.ilens_assistant.collections.step_templates import StepTemplates
from scripts.db.mongo.ilens_configuration.aggregations.users import UsersAggregate
from scripts.db.mongo.ilens_configuration.collections.constants import Constants
from scripts.errors import DuplicateTemplateNameError, BulkUploadError, ColumnsMisMatch, InvalidValueFound
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
from scripts.utils.ilens_publish_data import DataPush
class StagesData:
def __init__(self, project_id=None):
self.template_aggregate = TemplateInfoAggregate()
self.step_template = StepTemplates(mongo_client=mongo_client, project_id=project_id)
self.logbook_conn = LogbookInfo(mongo_client=mongo_client, project_id=project_id)
self.logbook_aggregate = LogbookInfoAggregate()
self.common_utils = CommonUtils(project_id=project_id)
self.user_aggregate = UsersAggregate()
self.user = User(mongo_client=mongo_client)
self.const_conn = Constants(mongo_client=mongo_client)
self.step_data_files_conn = StepDataFiles(mongo_client=mongo_client, project_id=project_id)
self.step_data_files_agg = StepDataFilesAggregate()
self.kafka_data_push = DataPush()
self.formde_proxy = PathToServices.DATA_ENGINE
self.backfill_api_path = f'{self.formde_proxy}{StageDataEndPoints.api_back_fill_data}'
async def create_template(self, template_file, data, user_id):
try:
template_name, template_id = data.get("template_name", ""), data.get("template_id", "")
check_template = self.step_template.find_template(
template_name=template_name)
if not template_id:
if check_template:
raise DuplicateTemplateNameError
template_id = 'step_template_' + self.common_utils.get_next_id("step_template_id")
meta = self.common_utils.get_user_meta(user_id, check_flag=True)
else:
if check_template and check_template.template_id != template_id:
raise DuplicateTemplateNameError
template_data = self.step_template.find_by_id(template_id=template_id)
meta = template_data.meta
meta.update(self.common_utils.get_user_meta(user_id, check_flag=False))
folder_name = os.path.join(PathToStorage.TEMPLATES_UPLOADS, TemplateStorage.templates_files)
file_save_path = self.save_file_data(template_file, folder_name, template_name)
final_data = dict(template_id=template_id, template_name=template_name,
logbook_id=data.get("logbook_id", ""),
associated_workflow_version=data.get("associated_workflow_version", 1),
meta=meta,
file_path=file_save_path)
self.step_template.update_template_data(template_id=template_id,
project_id=data.get("project_id"), data=final_data, upsert=True)
return True
except Exception as e:
logger.error(e.args)
raise
async def list_template_info(self, input_json: TemplateListRequest):
try:
response_data = TemplateListResponse()
template_data, response_data.total_no = await self.list_paginated_template_info(input_json.dict())
if response_data.total_no <= input_json.endRow - input_json.startRow:
response_data.endOfRecords = True
logbook_data = self.logbook_conn.get_logbook_data_by_aggregate(
self.logbook_aggregate.logbook_key_values_list(input_json.project_id))
logbook_data = logbook_data[0] if bool(logbook_data) else {}
logbook_version_names_mapping = self.logbook_conn.get_logbook_data_by_aggregate(
self.logbook_aggregate.get_logbook_versions_names_mapping(input_json.project_id))
logbook_version_names_mapping = logbook_version_names_mapping[0] if logbook_version_names_mapping else {}
all_users_list = list(
self.user.users_list_by_aggregate(query=self.user_aggregate.get_users_list(input_json.project_id)))
all_users_list = all_users_list[0] if bool(all_users_list) else {}
final_template_data = []
for each_template in template_data:
if not logbook_data.get(each_template.get("logbook_id", ""), ""):
continue
updated_on = each_template.get("updated_on", 0)
updated_on = int(updated_on / 1000)
each_template["updated_on"] = self.common_utils.get_iso_format(updated_on, input_json.timezone,
"%d %B %Y")
each_template["updated_by"] = all_users_list.get(each_template.get("updated_by", ""))
each_template["logbook_name"] = logbook_data.get(each_template.get("logbook_id", ""), "")
each_template["version_name"] = logbook_version_names_mapping.get(
f'{each_template.get("logbook_id", "")}${each_template.get("associated_workflow_version", 1) if each_template.get("associated_workflow_version", 1) else 1}')
final_template_data.append(each_template)
response_data.bodyContent = deepcopy(final_template_data)
return response_data
except Exception as e:
logger.error(e.args)
raise
async def list_paginated_template_info(self, input_data):
try:
json_data = self.template_aggregate.list_template_info(input_data["project_id"])
if input_data["filters"]:
for key, value in input_data["filters"].items():
if bool(value):
key = key.lower()
if key == "updated_by":
key = "meta.updated_by"
if key == "updated_on":
key = "meta.updated_at"
if key.lower() != "meta.updated_at":
json_data[0]["$match"].update(
{key: {"$regex": re.escape(value), '$options': "i"}})
else:
json_data[0]["$match"].update(
{key: {"$gte": int(value[0]), "$lte": value[1]}})
total_count_json = deepcopy(json_data)
json_data.extend([{'$skip': input_data["startRow"]},
{'$limit': input_data["endRow"]}])
response_aggregation = self.step_template.get_template_data_by_aggregate(json_data)
if not response_aggregation:
return list(), 0
records = list(response_aggregation)
total_records = list(self.step_template.get_template_data_by_aggregate(total_count_json))
return records, len(total_records)
        except Exception as e:
            logger.error(f"Failed to list templates by filters: {e}")
            raise
async def list_template_table_options(self, request_data: TemplateTableOptions):
try:
table_options_data = self.const_conn.find_constant(_type=request_data.type)
return table_options_data.data
except Exception as e:
logger.error(f"Failed to list table_options_data of templates: {e}")
async def delete_template(self, request_data: DeleteTemplate):
try:
template_data = self.step_template.find_by_id(template_id=request_data.template_id)
if not os.path.exists(template_data.file_path):
raise FileNotFoundError
self.delete_existing_file(template_data.file_path)
self.step_template.delete_template(template_id=request_data.template_id)
return True
except Exception as e:
logger.error(e.args)
raise
async def fetch_instance(self, request_data: FetchTemplate):
try:
response_data = self.step_template.find_by_id(template_id=request_data.template_id)
return response_data
except Exception as e:
logger.error(e.args)
raise
@staticmethod
def save_file_data(template_file, folder_name, template_name, extension=False):
try:
out_file_path = os.path.join(folder_name)
if not os.path.exists(out_file_path):
os.makedirs(out_file_path)
if not extension:
file_extension = template_file.filename.split(".")[-1]
template_name = f"{template_name}.{file_extension}"
out_file_path = os.path.join(out_file_path, template_name)
with open(out_file_path, 'wb') as out_file:
content = template_file.file.read()
out_file.write(content)
return out_file_path
except Exception as e:
logger.error(e.args)
raise
@staticmethod
def delete_existing_file(file_save_path):
try:
if os.path.exists(file_save_path):
os.remove(file_save_path)
except Exception as e:
logger.error(e.args)
raise
async def download_template(self, template_id):
try:
template_data = self.step_template.find_by_id(template_id=template_id)
if not os.path.exists(template_data.file_path):
raise FileNotFoundError
filepath = template_data.file_path
filename = filepath.split("/")[-1]
logger.debug(f"template downloaded : {filepath}")
if not filepath:
return None
return filepath, filename
except Exception as e:
logger.exception(e)
raise
async def get_template_key_value_pairs(self, input_json: TemplateKeyValuePairs):
try:
list_data = self.step_template.get_steps_data_data_by_aggregate(
self.template_aggregate.template_key_values_list(input_json.project_id, input_json.logbook_id))
list_data = list_data[0] if bool(list_data) else dict()
template_list = [{"label": key, "value": value} for value, key in list_data.items()]
return template_list
except Exception as e:
logger.exception(e)
raise
def save_template_data_file(self, data_file, data, user_id, project_id, request_obj: Request):
final_data, file_id = {}, ""
try:
template_data = self.step_template.find_by_id(template_id=data.get("template_id", ""))
meta = self.common_utils.get_user_meta(user_id, check_flag=True)
folder_name = os.path.join(PathToStorage.TEMPLATES_UPLOADS, TemplateStorage.upload_data_files)
if not os.path.exists(folder_name):
os.makedirs(folder_name)
file_id = f'file_{self.common_utils.get_next_id("file_")}_{data_file.filename}'
file_save_path = self.save_file_data(template_file=data_file, folder_name=folder_name,
template_name=file_id, extension=True)
final_data |= dict(file_id=file_id, file_name=data_file.filename, template_id=template_data.template_id,
logbook_id=template_data.logbook_id,
associated_workflow_version=template_data.associated_workflow_version, meta=meta,
status="Success")
self.step_data_files_conn.update_data_file(file_id=file_id, project_id=data.get("project_id"),
data=final_data, upsert=True)
self.parse_file(file_save_path, data.get("date"), template_data.file_path, data.get("logbook_id"),
data.get("timezone", "Asia/Kolkata"), project_id, request_obj=request_obj)
return True
except Exception as e:
final_data['status'] = "Failed"
self.step_data_files_conn.update_data_file(file_id=file_id, project_id=data.get("project_id"),
data=final_data, upsert=True)
logger.exception(e)
raise
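    # Compare the column headers of the first two sheets in the uploaded workbook with
    # those of the stored template file (order-insensitive).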
@staticmethod
def check_columns_match(template_file_path, sheet1_columns, sheet2_columns):
try:
wb = openpyxl.load_workbook(template_file_path)
xls = pd.ExcelFile(template_file_path, engine='openpyxl')
            template_sheets = wb.sheetnames  # get_sheet_names() is deprecated in recent openpyxl releases
template_sheet1_columns = list(pd.read_excel(xls, template_sheets[0]).keys())
template_sheet2_columns = list(pd.read_excel(xls, template_sheets[1]).keys())
            for columns in (sheet1_columns, template_sheet1_columns, sheet2_columns, template_sheet2_columns):
                columns.sort()
if sheet1_columns == template_sheet1_columns and \
sheet2_columns == template_sheet2_columns:
return True
return False
except Exception as e:
logger.exception(e)
raise
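    # Parse an uploaded .xlsx data file. Based on the parsing below, the workbook is
    # expected to contain two sheets: the first holds the data rows (including an
    # 'iLens Hierarchy' column) and the second maps column names to parameter ids.
    # Values are published to Kafka per site and then back-filled via the data engine API.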
def parse_file(self, file_save_path, date, template_file_path, logbook_id, timezone, project_id,
request_obj: Request):
try:
if not file_save_path.endswith('.xlsx'):
raise BulkUploadError
wb = openpyxl.load_workbook(file_save_path)
            sheets_name = wb.sheetnames  # get_sheet_names() is deprecated in recent openpyxl releases
xls = pd.ExcelFile(file_save_path, engine='openpyxl')
df1 = pd.read_excel(xls, sheets_name[0])
records = df1.to_dict('records')
df2 = pd.read_excel(xls, sheets_name[1])
df2 = df2.dropna()
df2_columns = list(df2.keys())
columns_match = self.check_columns_match(template_file_path, list(df1.keys()), df2_columns)
if not columns_match:
raise ColumnsMisMatch
key_column = df2_columns[0]
value_column = df2_columns[1]
column_param_dict = df2.set_index(key_column)[value_column].to_dict()
column_param_dict_lower = {key.lower(): value for key, value in column_param_dict.items()}
tag_dict, site_dict = {}, {}
for each in records:
tag_hierarchy = each['iLens Hierarchy']
for key, value in each.items():
key = key.lower()
if all((
key in column_param_dict_lower, not pd.isna(value),
not pd.isna(tag_hierarchy))):
if not isinstance(value, (int, float)):
raise InvalidValueFound(f"Invalid value {value} in column {key}")
if '$' not in column_param_dict_lower[key]:
tag_hierarchy = f"{tag_hierarchy}${column_param_dict_lower[key]}"
else:
tag_hierarchy = tag_hierarchy + column_param_dict_lower[key]
tag_dict.update({tag_hierarchy: value})
if site_dict.get(tag_hierarchy.split("$")[0]):
site_dict[tag_hierarchy.split("$")[0]].update({tag_hierarchy: value})
else:
site_dict.update({tag_hierarchy.split("$")[0]: {tag_hierarchy: value}})
for key, values in site_dict.items():
kairos_dict = {date: values}
self.common_utils.publish_data_to_kafka(kairos_dict, project_id)
back_fill_data = {
"logbook_ids": [logbook_id],
"values": tag_dict,
"tz": timezone,
"time_str": self.common_utils.get_iso_format(date / 1000, timezone=timezone,
timeformat=date_time_with_hour)
}
cookies = request_obj.cookies
            headers = {
                'login-token': request_obj.headers.get('login-token', request_obj.cookies.get('login-token')),
                'projectId': request_obj.cookies.get(
                    "projectId", request_obj.cookies.get("project_id", request_obj.headers.get("projectId"))),
                'userId': request_obj.cookies.get(
                    "user_id", request_obj.cookies.get("userId", request_obj.headers.get("userId")))}
resp = requests.post(url=self.backfill_api_path, json=back_fill_data, cookies=cookies, headers=headers)
if resp.status_code not in CommonStatusCode.SUCCESS_CODES:
logger.debug('Failed response from back fill api')
else:
logger.info('Back fill api successfully executed')
except Exception as e:
logger.exception(e)
raise
async def list_uploaded_files(self, input_json: UploadedFileList):
try:
response_data = UploadedFileListResponse()
files_list_data, response_data.total_no = await self.list_paginated_files_list_info(
input_json.dict())
if response_data.total_no <= input_json.endRow - input_json.startRow:
response_data.endOfRecords = True
logbook_data = self.logbook_conn.get_logbook_data_by_aggregate(
self.logbook_aggregate.logbook_key_values_list(input_json.project_id))
logbook_data = logbook_data[0] if logbook_data else dict()
logbook_version_names_mapping = self.logbook_conn.get_logbook_data_by_aggregate(
self.logbook_aggregate.get_logbook_versions_names_mapping(input_json.project_id))
logbook_version_names_mapping = logbook_version_names_mapping[0] if logbook_version_names_mapping else {}
all_users_list = list(
self.user.users_list_by_aggregate(query=self.user_aggregate.get_users_list(input_json.project_id)))
if bool(all_users_list):
all_users_list = all_users_list[0]
else:
all_users_list = dict()
file_data = []
for each_template in files_list_data:
if not logbook_data.get(each_template.get("logbook_id", ""), ""):
continue
updated_on = each_template.get("updated_on", 0)
updated_on = int(updated_on / 1000)
each_template["updated_on"] = self.common_utils.get_iso_format(updated_on, input_json.timezone,
"%d %B %Y")
each_template["updated_by"] = all_users_list.get(each_template.get("updated_by", ""))
each_template["logbook_name"] = logbook_data.get(each_template.get("logbook_id", ""), "")
                workflow_version = each_template.get("associated_workflow_version", 1) or 1
                each_template['version_name'] = logbook_version_names_mapping.get(
                    f'{each_template.get("logbook_id", "")}${workflow_version}')
file_data.append(each_template)
response_data.bodyContent = deepcopy(file_data)
return response_data
except Exception as e:
logger.error(e.args)
raise
async def list_paginated_files_list_info(self, input_data):
try:
json_data = self.step_data_files_agg.list_step_data_files(input_data["project_id"])
if input_data["filters"]:
for key, value in input_data["filters"].items():
if bool(value):
key = key.lower()
if key == "updated_by":
key = "meta.updated_by"
if key == "updated_on":
key = "meta.updated_at"
if key.lower() != "meta.updated_at":
json_data[0]["$match"].update(
{key: {"$regex": re.escape(value), '$options': "i"}})
else:
json_data[0]["$match"].update(
{key: {"$gte": int(value[0]), "$lte": value[1]}})
total_count_json = deepcopy(json_data)
json_data.extend([{'$skip': input_data["startRow"]},
{'$limit': input_data["endRow"]}])
response_aggregation = self.step_data_files_conn.get_step_files_data_by_aggregate(json_data)
if not response_aggregation:
return list(), 0
records = list(response_aggregation)
total_records = list(self.step_data_files_conn.get_step_files_data_by_aggregate(total_count_json))
return records, len(total_records)
        except Exception as e:
            logger.error(f"Failed to list uploaded data files by filters: {e}")
            raise
async def download_data_file(self, file_id):
try:
folder_name = os.path.join(PathToStorage.TEMPLATES_UPLOADS, TemplateStorage.upload_data_files)
step_file_data = self.step_data_files_conn.find_by_id(file_id=file_id)
if not step_file_data:
raise FileNotFoundError
filepath = f"{folder_name}/{step_file_data.file_id}"
filename = step_file_data.file_name
logger.debug(f"template downloaded : {filepath}")
if not filepath:
return None
return filepath, filename
except Exception as e:
logger.exception(e)
raise
async def delete_data_file(self, request_data: DeleteDataFile):
try:
folder_name = os.path.join(PathToStorage.TEMPLATES_UPLOADS, TemplateStorage.upload_data_files)
step_file_data = self.step_data_files_conn.find_by_id(file_id=request_data.file_id)
filepath = f"{folder_name}/{step_file_data.file_id}"
if not os.path.exists(filepath):
raise FileNotFoundError
self.delete_existing_file(filepath)
self.step_data_files_conn.delete_data_file(file_id=request_data.file_id)
return True
except Exception as e:
logger.error(e.args)
raise
from typing import Optional
from pydantic import BaseModel
class UserDataEntryRecord(BaseModel):
type: str
user_id: str
user_name: str
ip_address: str
date_time: int
tag_time: Optional[int]
source: str
previous_value: Optional[str] = ""
new_value: str
property_name: Optional[str] = ""
tag: Optional[str]
task_id: Optional[str] = ""
step_id: str
stage_id: Optional[str] = ""
project_id: Optional[str] = ""
action_status: str = "success"
error_logs: Optional[str]
from typing import Optional, Any
from pydantic import BaseModel
class TagList(BaseModel):
project_id: Optional[str]
status: Optional[str]
hierarchy: Optional[Any]
from typing import List, Optional, Dict
from pydantic import BaseModel
class SaveProdLoss(BaseModel):
date: str
step_id: str
line: str
equipment: str
loss_category: str
loss_reason: str
loss_reason_comment: str
loss_in_minutes: float
class Config:
orm_mode = True
class SaveTableRequest(BaseModel):
replicate_type: str
data_list: List
step_id: str
date: str
project_id: Optional[str]
cookies: Optional[Dict]
tz: str = "Asia/Kolkata"
class CustomRestAPIRequest(BaseModel):
submitted_data: Optional[Dict]
stage_id: str
project_id: str
task_id: str
tz: str
date: int
from typing import Optional, List, Dict, Any
from pydantic import BaseModel
class SaveForm(BaseModel):
project_id: Optional[str]
user_id: Optional[str]
stage_id: Optional[str]
submitted_data: Optional[Dict] = dict()
components: Optional[List]
stages: Optional[List]
type: Optional[str]
current_status: Optional[str]
date: Optional[int]
task_id: Optional[str]
tz: Optional[str] = "Asia/Kolkata"
template_type: Optional[List] = ["cross_step", "JMR"]
auto_populate_key: Optional[str] = "auto_populate"
triggers: Optional[Dict] = dict()
allow_all_manual: Optional[bool] = False
class TasksInfoList(BaseModel):
reference_id: str
task_creation_data: Dict
class CustomActionsModel(BaseModel):
task_details: Any
action: Any
submitted_data: Optional[Dict] = {}
on_click: str
tz: str
date: int
project_id: str
request_obj: Any
stage_id: str
from typing import Optional
from pydantic.main import BaseModel
class GetMultiFormData(BaseModel):
task_id: str
project_id: str
user_id: str
node_id: str
project_templates: Optional[str]
shouldHide: Optional[bool] = False
from typing import Optional, List
from pydantic.main import BaseModel
class EmailRequest(BaseModel):
receiver_list: List[str]
from_name: str
content: str
subject: str
gateway_id: str = "default"
template: Optional[str] = "default"
link: Optional[str]
class ExternRequest(BaseModel):
url: str
timeout: Optional[int]
cookies: Optional[dict]
params: Optional[dict]
auth: Optional[str]
from typing import Optional, Any
from pydantic import BaseModel
class DefaultResponse(BaseModel):
status: str = "Failed"
message: Optional[str]
data: Optional[Any]
class DefaultFailureResponse(DefaultResponse):
error: Any
message: Optional[Any]
class DefaultMobileResponse(BaseModel):
status: bool = False
message: Optional[str]
data: Optional[Any]
class DefaultMobileFailureResponse(DefaultResponse):
error: Any
message: Optional[Any] = "Failed"
from typing import Optional, Dict, Any
from pydantic import BaseModel
from scripts.constants.app_constants import SubmitAction
class StagesList(BaseModel):
task_id: str
workflow_id: str
task_status: str
workflow_version: int
project_id: str
mobile: Optional[bool] = False
class GetKDataRequest(BaseModel):
step_id: str
values: Dict
ts: int
time_str: Optional[str]
metadata: Optional[Dict] = dict()
tz: str = "Asia/Kolkata"
class SaveRemarks(BaseModel):
project_id: str
comment: Optional[str]
stage_id: Optional[str]
subject: Optional[str]
user_id: Optional[str]
user_name: Optional[str]
updated_on: Optional[str]
type: Optional[str]
parameter_specific: Optional[Dict] = dict()
class FetchRemarks(BaseModel):
stage_id: Optional[str]
project_id: str
timezone: Optional[str] = "Asia/Kolkata"
class GetTagsRequest(BaseModel):
step_id: str
class TriggerData(BaseModel):
trigger_time: Optional[int]
step_id: str
manual_entry: Optional[bool] = False
tz: str = "Asia/Kolkata"
trigger_method: str
status: Optional[int] = 1
tag_id: Optional[str]
from_time: Optional[int]
to_time: Optional[int]
class TriggerReferenceData(BaseModel):
project_id: Optional[str]
user_id: Optional[str]
stage_id: Optional[str]
submitted_data: Optional[Dict] = dict()
type: Optional[str] = SubmitAction.save
current_status: Optional[str]
task_id: Optional[str]
tz: Optional[str] = "Asia/Kolkata"
triggers: Optional[Dict] = dict()
date: Optional[int]
row_unique_key: str
property_value: Any
entity_key: str
field_type: str
class APIAction(BaseModel):
action_type: str
request_type: str
api: str
class MarkTaskCompleteRequest(BaseModel):
task_id: str
from_status: str
to_status: str
update_status: Optional[bool] = False
update_key: Optional[str]
update_value: Optional[str]
class SendNotification(BaseModel):
task_id: str
project_id: Optional[str]
tz: str = "Asia/Kolkata"
message: Optional[str] = ""
class CopyPropertyValues(BaseModel):
task_id: str
to_step_id: str
property_dict: dict
project_id: str
submitted_data: dict
periodic_date: Optional[str] = ""
from typing import Optional, List, Dict
from pydantic import BaseModel
class CreateTemplate(BaseModel):
pass
class TemplateListResponse(BaseModel):
bodyContent: Optional[List] = []
total_no: Optional[int]
endOfRecords: Optional[bool] = False
class UploadedFileListResponse(BaseModel):
bodyContent: Optional[List] = []
total_no: Optional[int]
endOfRecords: Optional[bool] = False
class TemplateListRequest(BaseModel):
startRow: Optional[int]
endRow: Optional[int]
filters: Optional[Dict]
project_id: Optional[str]
timezone: Optional[str] = "Asia/Kolkata"
class TemplateTableOptions(BaseModel):
project_id: Optional[str]
type: Optional[str]
class TemplateKeyValuePairs(BaseModel):
project_id: str
logbook_id: str
associated_workflow_version: int
class DeleteTemplate(BaseModel):
template_id: str
project_id: Optional[str]
class DeleteDataFile(BaseModel):
file_id: str
project_id: Optional[str]
class FetchTemplate(BaseModel):
template_id: str
project_id: str
class UploadedFileList(BaseModel):
startRow: Optional[int]
endRow: Optional[int]
filters: Optional[Dict]
project_id: Optional[str]
timezone: Optional[str] = "Asia/Kolkata"
from fastapi import APIRouter
from scripts.config.app_configurations import Service
from scripts.core.services.comments import comment_router
from scripts.core.services.custom import custom_router
from scripts.core.services.forms import form_router
from scripts.core.services.remarks import remark_router
from scripts.core.services.stages import stage_router
from scripts.core.services.stages_data import stages_data_router
from scripts.mobility.services.tasks import mobile_task_router
health_status = APIRouter()
@health_status.get(f"/api/{Service.MODULE_NAME}/healthcheck")
def ping():
return {"status": 200}
import traceback
from fastapi import APIRouter, Depends
from scripts.constants.api import CommentsEndPoints
from scripts.core.handlers.comments_handler import CommentHandler
from scripts.core.schemas.comments import TagList
from scripts.core.schemas.response_models import DefaultResponse, DefaultFailureResponse
from scripts.logging.logging import logger
from scripts.utils.security_utils.decorators import CookieAuthentication
from scripts.utils.security_utils.project_decorator import MetaInfoCookie, MetaInfoSchema
auth = CookieAuthentication()
comment_router = APIRouter(tags=["Stage services"], prefix=CommentsEndPoints.api_comment)
get_cookies = MetaInfoCookie()
@comment_router.post(CommentsEndPoints.api_list)
async def get_tags_comments(request_data: TagList, meta: MetaInfoSchema = Depends(get_cookies)):
try:
comment_handler = CommentHandler(project_id=meta.project_id)
response = comment_handler.get_tags_list(request_data)
return DefaultResponse(status="success", message="success", data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
import traceback
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from scripts.constants.api import FormEndPoints
from scripts.core.handlers.custom_handler import CustomHandler
from scripts.core.schemas.custom_models import SaveTableRequest
from scripts.core.schemas.response_models import DefaultResponse, DefaultFailureResponse
from scripts.db.psql.databases import get_assistant_db
from scripts.logging.logging import logger
from scripts.utils.security_utils.decorators import CookieAuthentication
from scripts.utils.security_utils.project_decorator import MetaInfoCookie
custom_router = APIRouter(tags=["Custom services"], prefix=FormEndPoints.api_custom)
auth = CookieAuthentication()
stage_handler = CustomHandler()
get_cookies = MetaInfoCookie()
@custom_router.post(FormEndPoints.api_save_table)
async def save_table_to_postgres(request_data: SaveTableRequest, db: Session = Depends(get_assistant_db)):
try:
response = stage_handler.save_table_to_postgres(request_data, db)
return DefaultResponse(status="success", message="success", data=response).dict()
except TypeError:
return DefaultFailureResponse(message="Field properties in form"
"do not match model properties, please revise.").dict()
except ModuleNotFoundError:
return DefaultFailureResponse(
message="Model not associated to step replicate type. Please consider revising.").dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
\ No newline at end of file
"""Services below are only concerned with CRUD operations associated to:
Workflow Stages
These are stages created in each instance
"""
import traceback
from typing import Optional
from fastapi import APIRouter, Depends, BackgroundTasks, Request
from sqlalchemy.orm import Session
from scripts.constants.api import FormEndPoints
from scripts.core.handlers.form_handler import FormHandler
from scripts.core.schemas.forms import SaveForm
from scripts.core.schemas.response_models import DefaultResponse, DefaultFailureResponse
from scripts.db.psql.databases import get_db
from scripts.errors import ILensPermissionError, RequiredFieldMissing, InternalError, QuantityGreaterThanException
from scripts.logging.logging import logger
from scripts.utils.security_utils.decorators import CookieAuthentication
from scripts.utils.security_utils.project_decorator import MetaInfoCookie, MetaInfoSchema
form_router = APIRouter(tags=["Form services"], prefix=FormEndPoints.api_render)
auth = CookieAuthentication()
get_cookies = MetaInfoCookie()
@form_router.post(FormEndPoints.api_form)
async def form_rendering_service(request_data: SaveForm,
request: Request,
bg_task: BackgroundTasks,
save: Optional[bool] = False,
submit: Optional[bool] = False,
mobile: Optional[bool] = False,
user_id=Depends(auth),
db: Session = Depends(get_db),
meta: MetaInfoSchema = Depends(get_cookies)
):
try:
form_handler = FormHandler(project_id=meta.project_id)
if not user_id:
raise ILensPermissionError("Denied: Unauthorized User")
request_data.user_id = user_id
message, response = await form_handler.form_renderer(request_data, user_id, request, save, submit, mobile, db,
bg_task)
if message:
return DefaultResponse(status="success", message=message, data=response).dict()
return DefaultResponse(status="success", data=response).dict()
except QuantityGreaterThanException as e:
return DefaultFailureResponse(message=e.args[0]).dict()
except InternalError as e:
return DefaultResponse(message=e.args[0], status="warning")
except RequiredFieldMissing as e:
logger.info(f"Required fields have not been filled by user")
return DefaultResponse(message=e.args[0])
except ILensPermissionError as e:
return DefaultResponse(message=e.args[0])
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message="Error encountered in rendering or saving form")
import traceback
from fastapi import APIRouter, Depends
from scripts.constants.api import FormEndPoints
from scripts.core.handlers.remarks_handler import RemarkHandler
from scripts.core.schemas.response_models import DefaultResponse, DefaultFailureResponse
from scripts.core.schemas.stages import SaveRemarks, FetchRemarks
from scripts.logging.logging import logger
from scripts.utils.security_utils.decorators import CookieAuthentication
from scripts.utils.security_utils.project_decorator import MetaInfoCookie, MetaInfoSchema
remark_router = APIRouter(tags=["Remark services"], prefix=FormEndPoints.api_remark)
auth = CookieAuthentication()
get_cookies = MetaInfoCookie()
@remark_router.post(FormEndPoints.api_save)
async def save_remarks(request_data: SaveRemarks, user_id=Depends(auth),
meta: MetaInfoSchema = Depends(get_cookies)):
try:
remark_handler = RemarkHandler(project_id=meta.project_id)
response = remark_handler.save_remarks_data(request_data, user_id)
return DefaultResponse(status="success", message="Saved comments successfully",
data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args)
@remark_router.post(FormEndPoints.api_get)
async def fetch_remarks(request_data: FetchRemarks, user_id=Depends(auth),
meta: MetaInfoSchema = Depends(get_cookies)):
try:
remark_handler = RemarkHandler(project_id=meta.project_id)
response = remark_handler.fetch_remarks(request_data)
return DefaultResponse(status="success", message="Fetched comments successfully",
data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args)
import traceback
from typing import Optional, Any
from fastapi import APIRouter, Depends, Request
from sqlalchemy.orm import Session
from starlette.background import BackgroundTasks
from scripts.constants.api import FormEndPoints
from scripts.constants.app_constants import AuditingKeys
from scripts.core.engine.form_renderer import FormRenderingEngine
from scripts.core.handlers.stage_handler import StageHandler
from scripts.core.schemas.response_models import DefaultResponse, DefaultFailureResponse
from scripts.core.schemas.stages import StagesList, GetKDataRequest, TriggerData, TriggerReferenceData, \
MarkTaskCompleteRequest, SendNotification, CopyPropertyValues
from scripts.db.psql.databases import get_db
from scripts.errors import StepsNotConfigured, ImplementationError, RestrictBlanks
from scripts.logging.logging import logger
from scripts.utils.security_utils.decorators import CookieAuthentication
from scripts.utils.security_utils.project_decorator import MetaInfoCookie, MetaInfoSchema
stage_router = APIRouter(tags=["Stage services"], prefix=FormEndPoints.api_stage)
auth = CookieAuthentication()
get_cookies = MetaInfoCookie()
@stage_router.post(FormEndPoints.api_list)
async def get_stages_list(request_data: StagesList, nav_type: Optional[str] = "", user_id=Depends(auth),
meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_handler = StageHandler(project_id=meta.project_id)
response = stage_handler.get_stages_list(request_data, user_id, nav_type)
return DefaultResponse(status="success", message="success", data=response).dict()
except StepsNotConfigured:
return DefaultFailureResponse(message="Steps are not configured for the selected task")
except PermissionError as e:
return DefaultFailureResponse(error="Permission Error",
message=f"Permission Error, possible causes: <br/> "
f"{e.args[0]}")
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
@stage_router.post(FormEndPoints.api_add_periodic_data)
async def add_periodic_stage_data(request_data: GetKDataRequest, meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_handler = StageHandler(project_id=meta.project_id)
response = stage_handler.add_periodic_stage_data(request_data)
return DefaultResponse(status="success", message="success", data=response).dict()
except StepsNotConfigured:
return DefaultFailureResponse(message="Steps are not configured for the selected task")
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
@stage_router.get(FormEndPoints.api_get_tags)
async def get_tags(step_id: str, missing: bool = False, meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_handler = StageHandler(project_id=meta.project_id)
response = stage_handler.get_tags(step_id, missing)
return DefaultResponse(status="success", message="success", data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
@stage_router.get(FormEndPoints.api_get_time_list)
async def get_time_list(step_id: str, meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_handler = StageHandler(project_id=meta.project_id)
response = stage_handler.get_time_list(step_id)
return DefaultResponse(status="success", message="success", data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
@stage_router.get(FormEndPoints.api_timewise_tags)
async def get_tag_and_time_list(step_id: str, meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_handler = StageHandler(project_id=meta.project_id)
response = await stage_handler.get_tag_and_time_list(step_id)
return DefaultResponse(status="success", message="success", data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
@stage_router.get(FormEndPoints.api_trigger)
async def trigger_data(step_id: str,
tz: str,
request: Request,
trigger_time: Optional[int] = None,
option: str = "add_row",
trigger_method: Optional[str] = AuditingKeys.machine,
status: Optional[int] = None,
tag_id: Optional[str] = None,
real_time: Optional[bool] = True,
restrict_blanks: Optional[bool] = False,
meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_handler = StageHandler(project_id=meta.project_id)
trigger_model = TriggerData(step_id=step_id, tz=tz, trigger_method=trigger_method, status=status, tag_id=tag_id)
if trigger_time:
trigger_model.trigger_time = trigger_time
response = await stage_handler.add_triggered_data(option, trigger_model, real_time, restrict_blanks,
project_id=meta.project_id,
request_obj=request)
return DefaultResponse(status="success", message="Trigger operation successful", data=response).dict()
except RestrictBlanks:
return DefaultResponse(
status="success",
message="Triggered successfully. Row not populated since Restrict blanks was enabled").dict()
except ModuleNotFoundError:
return DefaultResponse(
message="Error occurred while connecting to Periodic Data Engine. Please Contact Admin").dict()
except ImplementationError as message:
return DefaultResponse(message=message.args[0]).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
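# Note on the trigger endpoint above (values below are hypothetical): the query parameters
# are folded into a TriggerData model, and trigger_time is attached only when supplied, e.g.
#   TriggerData(step_id="step_01", tz="Asia/Kolkata",
#               trigger_method=AuditingKeys.machine, status=None, tag_id=None)
# A RestrictBlanks raise from the handler is reported as a successful trigger whose row was
# intentionally not populated, rather than as a failure.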
@stage_router.get(FormEndPoints.api_trigger_task_completion)
async def trigger_task_completion(factor: str, meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_handler = StageHandler(project_id=meta.project_id)
response = await stage_handler.trigger_task_completion(factor)
return DefaultResponse(status="success", message="success", data=response).dict()
except KeyError:
return DefaultResponse(
message="Key Error encountered while updating triggered tasks").dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
@stage_router.put(FormEndPoints.api_mark_task_complete)
async def mark_task_complete(request_data: MarkTaskCompleteRequest,
meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_handler = StageHandler(project_id=meta.project_id)
response = await stage_handler.mark_task_complete(request_data)
return DefaultResponse(status="success", message="success", data=response).dict()
except KeyError:
return DefaultResponse(
message="Key Error encountered while marking task complete").dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
@stage_router.get(FormEndPoints.api_list_periodic_steps)
async def get_periodic_steps(skip: int, limit: int = 5, meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_handler = StageHandler(project_id=meta.project_id)
response = await stage_handler.get_periodic_steps(skip, limit)
return DefaultResponse(status="success", message="success", data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
@stage_router.get(FormEndPoints.api_reference)
async def trigger_reference_data(
user_id: str, stage_id: str, task_id: str, tz: str, project_id: str,
current_status: str, date: Optional[str] = None, entity_name: Optional[str] = None,
entity_key: Optional[str] = None, property_value: Optional[Any] = None,
row_unique_key: Optional[str] = None, field_type: Optional[str] = "textfield",
entity_search: Optional[str] = "true", meta: MetaInfoSchema = Depends(get_cookies)
):
try:
form_render = FormRenderingEngine(project_id=meta.project_id)
if not row_unique_key or not property_value or not entity_key or (
not entity_name and entity_search.lower() != "false"):
return DefaultResponse(status="success", message="success").dict()
request_data = TriggerReferenceData(user_id=user_id, stage_id=stage_id, task_id=task_id, tz=tz,
property_value=property_value, entity_key=entity_key,
project_id=project_id,
row_unique_key=row_unique_key, field_type=field_type,
current_status=current_status)
if date:
request_data.triggers = dict(date=date)
response = await form_render.form_fill_with_reference_data(input_data=request_data,
entity_search=entity_search, entity_name=entity_name)
return DefaultResponse(status="success", message="success", data=dict(submitted_data=response)).dict()
except ModuleNotFoundError:
return DefaultResponse(
message="Error occurred while connecting to Reference Data Engine. Please Contact Admin").dict()
except ImplementationError as message:
return DefaultResponse(message=message.args[0]).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
@stage_router.post(FormEndPoints.api_send_notification)
async def send_notification(request_data: SendNotification, meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_handler = StageHandler(project_id=meta.project_id)
response = await stage_handler.send_notification_for_roles(request_data)
return DefaultResponse(status="success", message="success", data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
@stage_router.post(FormEndPoints.api_copy_property_values)
async def copy_property_values(request_data: CopyPropertyValues,
bg_task: BackgroundTasks,
request: Request,
db: Session = Depends(get_db),
meta: MetaInfoSchema = Depends(get_cookies),
user_id=Depends(auth),
):
try:
stage_handler = StageHandler(project_id=meta.project_id)
response = await stage_handler.copy_property_values(request_data, user_id, bg_task, request, db)
return DefaultResponse(status="success", message="success", data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message=e.args).dict()
import json
import traceback
from fastapi import APIRouter, UploadFile, File, Form, Depends, HTTPException, Request
from fastapi.responses import FileResponse
from scripts.constants.api import FormEndPoints, StageDataEndPoints
from scripts.core.handlers.stages_data import StagesData
from scripts.core.schemas.response_models import DefaultResponse, DefaultFailureResponse
from scripts.core.schemas.stages_data import TemplateListRequest, FetchTemplate, \
TemplateTableOptions, TemplateKeyValuePairs, UploadedFileList, DeleteTemplate, DeleteDataFile
from scripts.errors import DuplicateTemplateNameError, BulkUploadError, ColumnsMisMatch, InvalidValueFound
from scripts.logging.logging import logger
from scripts.utils.security_utils.decorators import CookieAuthentication
from scripts.utils.security_utils.project_decorator import MetaInfoCookie, MetaInfoSchema
stages_data_router = APIRouter(tags=["Stage services"], prefix=FormEndPoints.api_stages_data)
get_cookies = MetaInfoCookie()
auth = CookieAuthentication()
@stages_data_router.post(StageDataEndPoints.api_create_template)
async def create_template(template_file: UploadFile = File(...), data: str = Form(...), user_id=Depends(auth),
meta: MetaInfoSchema = Depends(get_cookies)):
try:
if template_file.content_type not in (
"application/pdf",
"text/csv",
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
):
raise HTTPException(400, detail="Invalid document type")
stage_data_handler = StagesData(project_id=meta.project_id)
data = json.loads(data)
response = await stage_data_handler.create_template(template_file, data, user_id)
if response:
return DefaultResponse(status="success", message="Saved successfully", data=response).dict()
except DuplicateTemplateNameError:
return DefaultFailureResponse(message="Duplicate name exists")
    except HTTPException:
        raise
    except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message="Error Occurred while saving template")
@stages_data_router.post(StageDataEndPoints.api_list_template, dependencies=[Depends(auth)])
async def list_template_info(request_data: TemplateListRequest,
meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_data_handler = StagesData(project_id=meta.project_id)
response = await stage_data_handler.list_template_info(request_data)
return DefaultResponse(status="success",
message="Listed successfully",
data=response).dict()
except Exception as e:
logger.exception(e)
logger.exception(traceback.format_exc())
return DefaultFailureResponse(error=e.args)
@stages_data_router.post(StageDataEndPoints.api_template_table_options, dependencies=[Depends(auth)])
async def list_template_table_options(request_data: TemplateTableOptions,
                                      meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_data_handler = StagesData(project_id=meta.project_id)
response = await stage_data_handler.list_template_table_options(request_data)
return DefaultResponse(status="success",
message="Fetched successfully",
data=response).dict()
except Exception as e:
logger.exception(e)
logger.exception(traceback.format_exc())
return DefaultFailureResponse(error=e.args, message="Error Occurred while listing templates")
@stages_data_router.post(StageDataEndPoints.api_delete_template, dependencies=[Depends(auth)])
async def delete_template_data(request_data: DeleteTemplate,
meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_data_handler = StagesData(project_id=meta.project_id)
response = await stage_data_handler.delete_template(request_data)
return DefaultResponse(status="success",
message="Template deleted Successfully",
data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message="Error Occurred while deleting template")
@stages_data_router.post(StageDataEndPoints.api_fetch_template)
async def fetch_task_info(request_data: FetchTemplate, meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_data_handler = StagesData(project_id=meta.project_id)
response = stage_data_handler.fetch_instance(request_data)
return DefaultResponse(status="success", data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message="Error occurred in server while fetching task info")
@stages_data_router.get(StageDataEndPoints.api_download_template)
async def download_template(template_id: str, meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_data_handler = StagesData(project_id=meta.project_id)
file_path, filename = await stage_data_handler.download_template(template_id)
if file_path:
return FileResponse(file_path, filename=filename)
else:
raise HTTPException(status_code=404, detail="File Not Found")
    except HTTPException:
        raise
    except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message="Error occurred while downloading template")
@stages_data_router.post(StageDataEndPoints.api_upload_data_sheet)
def upload_data_sheet(request: Request, data_file: UploadFile = File(...), data: str = Form(...), user_id=Depends(auth),
meta: MetaInfoSchema = Depends(get_cookies)):
try:
if data_file.content_type not in (
"application/pdf",
"text/csv",
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
):
raise HTTPException(400, detail="Invalid document type")
stage_data_handler = StagesData(project_id=meta.project_id)
data = json.loads(data)
response = stage_data_handler.save_template_data_file(data_file, data, user_id, meta.project_id,
request_obj=request)
return DefaultResponse(status="success",
message="Template uploaded Successfully",
data=response).dict()
except BulkUploadError:
return DefaultFailureResponse(error="File type not supported", message="File type not supported")
except ColumnsMisMatch:
return DefaultFailureResponse(error="Columns mismatch", message="Columns mismatch")
    except InvalidValueFound as invalid_value:
        return DefaultFailureResponse(error=invalid_value.args, message="Invalid value found in file")
    except HTTPException:
        raise
    except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message="Error Occurred while saving data_file")
@stages_data_router.post(StageDataEndPoints.api_get_templates, dependencies=[Depends(auth)])
async def list_templates_dropdown(request_data: TemplateKeyValuePairs,
meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_data_handler = StagesData(project_id=meta.project_id)
response = await stage_data_handler.get_template_key_value_pairs(request_data)
return DefaultResponse(status="success", data=response).dict()
except Exception as e:
logger.exception(e)
logger.exception(traceback.format_exc())
return DefaultFailureResponse(error=e.args)
@stages_data_router.post(StageDataEndPoints.api_get_file_data_list, dependencies=[Depends(auth)])
async def list_uploaded_files(request_data: UploadedFileList,
meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_data_handler = StagesData(project_id=meta.project_id)
response = await stage_data_handler.list_uploaded_files(request_data)
return DefaultResponse(status="success",
message="Listed successfully",
data=response).dict()
except Exception as e:
logger.exception(e)
logger.exception(traceback.format_exc())
return DefaultFailureResponse(error=e.args, message="Error Occurred while listing uploaded files")
@stages_data_router.get(StageDataEndPoints.api_download_data_file)
async def download_data_file(file_id: str, meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_data_handler = StagesData(project_id=meta.project_id)
file_path, filename = await stage_data_handler.download_data_file(file_id)
if file_path:
return FileResponse(file_path, filename=filename)
else:
raise HTTPException(status_code=404, detail="File Not Found")
    except HTTPException:
        raise
    except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message="Error occurred while downloading file")
@stages_data_router.post(StageDataEndPoints.api_delete_data_file, dependencies=[Depends(auth)])
async def delete_data_file(request_data: DeleteDataFile,
meta: MetaInfoSchema = Depends(get_cookies)):
try:
stage_data_handler = StagesData(project_id=meta.project_id)
response = await stage_data_handler.delete_data_file(request_data)
return DefaultResponse(status="success",
message="File deleted Successfully",
data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args, message="Error Occurred while deleting file")
from scripts.config.app_configurations import DBConf
from scripts.db.mongo.ilens_assistant.collections.form_props import FormProps
from scripts.db.mongo.ilens_assistant.collections.logbook_links import LogbookLinkInfo
from scripts.db.mongo.ilens_assistant.collections.periodic_data import PeriodicData
from scripts.db.mongo.ilens_assistant.collections.steps import StepCollection, StepSchema
from scripts.db.mongo.ilens_assistant.collections.task_info import TaskCollection
from scripts.db.mongo.ilens_assistant.collections.task_instance_data import TaskInstanceData, TaskInstanceDataSchema
from scripts.db.mongo.ilens_assistant.collections.task_instances import TaskInstance, TaskInstanceSchema
from scripts.db.mongo.ilens_assistant.collections.trigger_steps import TriggerStepCollection, TriggerStepSchema
from scripts.db.mongo.ilens_assistant.collections.triggers import Trigger
from scripts.db.mongo.ilens_assistant.collections.workflow import Workflow, WorkflowSchema
from scripts.db.mongo.ilens_configuration.collections.constants import Constants
from scripts.db.mongo.ilens_configuration.collections.user import User
from scripts.utils.mongo_util import MongoConnect
mongo_client = MongoConnect(uri=DBConf.MONGO_URI)()
class CommonAggregates:
@staticmethod
def get_workflows_permissions(workflow_id: str, workflow_version: int, workflow_status: str, user_role: str):
query_json = [
{
'$match': {
'workflow_id': workflow_id,
'workflow_version': workflow_version,
'workflow_status': workflow_status,
'user_role': user_role
}
},
{'$project': {"_id": 0}}
]
return query_json
@staticmethod
def get_step_details(steps: list):
query_json = [
{
'$match': {
"step_id": {"$in": steps}
}
}, {
'$group': {
'_id': None,
'data': {
'$push': {
'k': '$step_id',
'v': {"display_title": {"$ifNull": ['$display_title', ""]},
"menu_placement": {"$ifNull": ['$menu_placement', ""]}}
}
}
}
}, {
'$replaceRoot': {
'newRoot': {
'$arrayToObject': '$data'
}
}
}
]
return query_json
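# Shape of the get_step_details result (ids and titles below are hypothetical): the $group
# stage pushes {k, v} pairs and $arrayToObject flattens them, so the pipeline yields a single
# document keyed by step_id, e.g.
#   {"step_01": {"display_title": "Pre-checks", "menu_placement": "top"},
#    "step_02": {"display_title": "", "menu_placement": ""}}
# with missing display_title / menu_placement values defaulting to "" through $ifNull.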
from typing import Optional, List, Dict
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import AssetDetailsKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class AssetDetailSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
asset_model_id: Optional[str]
allow_editing: Optional[bool] = True
asset_description: Optional[str]
asset_version: Optional[str]
asset_model_name: Optional[str]
asset_model_type: Optional[str]
asset_model_icon: Optional[str]
parameters: Optional[Dict] = dict()
parameters_new: Optional[Dict] = dict()
processes: Optional[list] = list()
device_models: Optional[List] = list()
events: Optional[List] = list()
resources: Optional[Dict] = dict()
others: Optional[Dict] = dict()
class AssetDetail(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_asset_model,
collection=CollectionNames.asset_model_details)
self.project_id = project_id
@property
def key_project_id(self):
return AssetDetailsKeys.KEY_PROJECT_ID
@property
def key_asset_model_id(self):
return AssetDetailsKeys.KEY_ASSET_MODEL_ID
@property
def key_asset_version(self):
return AssetDetailsKeys.KEY_ASSET_VERSION
@property
def key_asset_model_name(self):
return AssetDetailsKeys.KEY_ASSET_MODEL_NAME
def find_asset_detail_by_id(self, asset_id, asset_version, filter_dict=None):
query = dict()
if asset_id:
query.update({self.key_asset_model_id: asset_id})
if asset_version:
query.update({self.key_asset_version: asset_version})
asset_data = self.find_one(query=query, filter_dict=filter_dict)
if asset_data:
return asset_data
return dict()
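# Minimal usage sketch (illustrative only): AssetDetail is built on an existing Mongo client
# and queried by model id and version; filter_dict is passed through to find_one as a
# projection. The ids and projection below are hypothetical.
if __name__ == "__main__":
    from scripts.config.app_configurations import DBConf
    from scripts.utils.mongo_util import MongoConnect

    client = MongoConnect(uri=DBConf.MONGO_URI)()
    asset_details = AssetDetail(mongo_client=client, project_id="project_01")
    detail = asset_details.find_asset_detail_by_id(
        asset_id="asset_model_01", asset_version="1",
        filter_dict={"_id": 0, "parameters": 1})
    print(detail)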
class LogbookInfoAggregate:
@staticmethod
def logbook_key_values_list(project_id: str, step_category: str = None):
query_dict = {"project_id": project_id, "is_deleted": False}
        if step_category:
query_dict.update(template_category=step_category)
return [{'$match': query_dict},
{'$project': {'logbook_id': '$logbook_id', 'logbook_name': '$logbook_name',
'logbookName': {'$toLower': '$logbook_name'}}},
{'$sort': {'logbookName': -1}}, {'$group': {'_id': None, 'data': {
'$push': {'k': {'$ifNull': ['$logbook_id', '']}, 'v': {'$ifNull': ['$logbook_name', '']}}}}},
{'$replaceRoot': {'newRoot': {'$arrayToObject': '$data'}}}]
@staticmethod
def get_logbook_versions_names_mapping(project_id):
return [
{
'$match': {
'project_id': project_id
}
}, {
'$lookup': {
'from': 'workflows',
'localField': 'associated_workflow_id',
'foreignField': 'workflow_id',
'as': 'workflow_data'
}
}, {
'$unwind': {
'path': '$workflow_data'
}
}, {
'$group': {
'_id': None,
'data': {
'$push': {
'k': {
'$concat': [
'$logbook_id', {
'$literal': '$'
}, {
'$toString': '$workflow_data.workflow_version'
}
]
},
'v': {
'$ifNull': [
'$workflow_data.version_comments', {
'$concat': [
'Version ', {
'$toString': '$workflow_data.workflow_version'
}, '.0'
]
}
]
}
}
}
}
}, {
'$replaceRoot': {
'newRoot': {
'$arrayToObject': '$data'
}
}
}
]
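# Shape of the get_logbook_versions_names_mapping result (values below are hypothetical): each
# key concatenates the logbook id, a literal "$" and the joined workflow version, and the value
# falls back to "Version <n>.0" when version_comments is absent, e.g.
#   {"logbook_abc$1": "Initial release", "logbook_abc$2": "Version 2.0"}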
class PeriodicDataAgg:
@staticmethod
def get_latest_value(step_id, date, granular_tag_query):
query = [
{
'$match': {
'step_id': step_id,
"$expr": {"$lt": [{"$dateFromString": {"dateString": "$date"}}, date]},
'$and': [
{
'data.values': {
'$exists': 1
}
}, {
'data.values': {
'$ne': {}
}
},
granular_tag_query
]
}
},
{
'$project': {
'step_id': '$step_id',
'manual_data': '$manual_data',
'data': '$data',
'date': {
'$dateFromString': {
'dateString': '$date'
}
}
}
}, {
'$unwind': {
'path': '$data'
}
}, {
'$sort': {
'data.ts': -1
}
}
]
return query
@staticmethod
def month_to_date(step_id, to_date, mtd_keys_match):
query = [
{
'$match': {
'step_id': step_id
}
},
{
'$addFields': {
'date': {
'$dateFromString': {
'dateString': '$date',
'format': '%Y-%m-%d'
}
}
}
},
{
"$match": {
"date": {
"$lte": to_date
},
"$expr": {
"$and": [
{
"$eq": [
{
"$month": '$date'
},
to_date.month
]
},
{
"$eq": [
{
"$year": '$date'
},
to_date.year
]
}
]
}
}
},
{
'$project': {
'date': '$date',
'manual_data': '$manual_data'
}
},
{
"$group": {
"_id": {
"Date": {
"$month": "$date"
}
},
**mtd_keys_match
}
},
{
'$project': {
'_id': 0
}
}
]
return query
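# Note on month_to_date (keys below are hypothetical): the caller supplies pre-built $group
# accumulators. PeriodicData.find_mtd (further below) derives them from an
# {output_key: manual_data_key} mapping, e.g.
#   mtd_keys       = {"total_downtime": "downtime_minutes"}
#   mtd_keys_match = {"total_downtime": {"$sum": "$manual_data.downtime_minutes"}}
# so the pipeline returns one document with the month-to-date sums for the requested month.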
class StepDataFilesAggregate:
@staticmethod
def list_step_data_files(project_id):
query_json = [
{
'$match': {
'project_id': project_id
}
},
{
'$sort': {
'meta.updated_at': -1
}
},
{
'$project': {
'_id': 0,
'template_id': '$template_id',
'file_id': '$file_id',
'logbook_id': '$logbook_id',
'project_id': '$project_id',
'file_name': '$file_name',
'updated_on': '$meta.updated_at',
'updated_by': '$meta.updated_by',
'status': '$status',
'associated_workflow_version': '$associated_workflow_version'
}
}
]
return query_json
from typing import Optional
class TemplateInfoAggregate:
@staticmethod
def list_template_info(project_id):
query_json = [
{
'$match': {
'project_id': project_id
}
},
{
'$sort': {
'meta.updated_at': -1
}
},
{
'$project': {
'_id': 0,
'template_id': '$template_id',
'project_id': '$project_id',
'template_name': '$template_name',
'updated_on': '$meta.updated_at',
'updated_by': '$meta.updated_by',
'logbook_id': '$logbook_id',
'associated_workflow_version': '$associated_workflow_version'
}
}
]
return query_json
@staticmethod
def template_key_values_list(project_id: str, logbook_id: Optional[str]):
query_dict = {"project_id": project_id}
if logbook_id:
query_dict.update(logbook_id=logbook_id)
query_json = [{'$match': query_dict}, {'$sort': {'meta.created_at': -1}}, {'$group': {'_id': None, 'data': {
'$push': {'k': {'$ifNull': ['$template_id', '']}, 'v': {'$ifNull': ['$template_name', '']}}}}},
{'$replaceRoot': {'newRoot': {'$arrayToObject': '$data'}}}]
return query_json
from typing import Optional, Dict
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import FormPropsKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class FormPropsSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
step_id: Optional[str]
form_info: Optional[Dict]
class FormProps(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.form_props)
self.project_id = project_id
@property
def key_step_id(self):
return FormPropsKeys.KEY_STEP_ID
@property
def key_form_info(self):
return FormPropsKeys.KEY_FORM_INFO
def add_form_info(self, step_id, form_info):
json_data = {self.key_step_id: step_id, self.key_form_info: form_info}
self.insert_one(json_data)
return True
def update_form_info(self, step_id, form_info, overwrite=True):
json_data = {self.key_form_info: form_info} if overwrite else form_info
query = {self.key_step_id: step_id}
self.update_one(query, json_data, True)
return True
def find_by_id(self, step_id: str):
query = {self.key_step_id: step_id}
record = self.find_one(query)
if not record:
return FormPropsSchema(**dict())
return FormPropsSchema(**record)
def find_step_props(self, step_list):
records = self.find({self.key_step_id: {"$in": step_list}}, {"step_id": 1, "form_info": 1})
if not records:
return dict()
return {x['step_id']: x["form_info"] for x in records}
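# Minimal usage sketch (illustrative only): find_step_props returns a {step_id: form_info}
# mapping for the requested steps, which suits bulk form rendering. Ids below are hypothetical.
if __name__ == "__main__":
    from scripts.config.app_configurations import DBConf
    from scripts.utils.mongo_util import MongoConnect

    client = MongoConnect(uri=DBConf.MONGO_URI)()
    form_props = FormProps(mongo_client=client, project_id="project_01")
    props_by_step = form_props.find_step_props(["step_01", "step_02"])
    for step_id, form_info in props_by_step.items():
        print(step_id, list(form_info.keys()))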
from typing import Optional, Dict
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class LogbookInfoSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
project_id: Optional[str]
logbook_name: Optional[str]
logbook_description: Optional[str]
logbook_id: Optional[str]
logbook_type: Optional[str]
logbook_tags: Optional[list] = []
is_deleted: Optional[bool]
meta: Optional[Dict]
    hierarchy_dict: Optional[dict] = {}
hierarchy_level: Optional[str]
create_step_id: Optional[str]
associated_workflow_id: Optional[str]
associated_workflow_version: Optional[str]
class LogbookInfo(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.logbook)
self.project_id = project_id
def find_by_id(self, logbook_id: str):
query = {"logbook_id": logbook_id}
record = self.find_one(query)
if not record:
return LogbookInfoSchema(**dict())
return LogbookInfoSchema(**record)
def get_logbook_data(self, **query):
record = self.find_one(query)
if not record:
return None
return record
def add_new_logbook(self, data):
self.insert_one(data.dict())
return LogbookInfoSchema(**data.dict()).logbook_id
def update_logbook_data(self, logbook_id, data, upsert=False):
query = {"logbook_id": logbook_id}
return self.update_one(data=data, query=query, upsert=upsert)
def delete_builder(self, logbook_id):
query = {"logbook_id": logbook_id}
return self.delete_one(query=query)
def get_logbook_data_by_aggregate(self, query: list):
return list(self.aggregate(pipelines=query))
def find_logbooks(self, project_id: str):
query = {"project_id": project_id}
record = self.find(query)
if not record:
return None
return list(record)
from typing import Optional, List
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class LogbookLinkSchema(MongoBaseSchema):
logbook_id: Optional[str]
external_links: Optional[List] = []
is_deleted: Optional[bool]
class LogbookLinkInfo(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.logbook_links)
self.project_id = project_id
def find_by_logbook_id(self, logbook_id):
query = {"logbook_id": logbook_id}
record = self.find_one(query)
if not record:
return LogbookLinkSchema(**dict())
return LogbookLinkSchema(**record)
from datetime import date
from typing import Optional, List, Dict
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import PeriodicDataKeys
from scripts.db.mongo.ilens_assistant.aggregations.periodic_data import PeriodicDataAgg
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class PeriodicDataSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
step_id: Optional[str]
date: Optional[str]
data: Optional[List] = []
manual_data: Optional[Dict] = dict()
class PeriodicData(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.periodic_data)
self.project_id = project_id
@property
def key_step_id(self):
return PeriodicDataKeys.KEY_STEP_ID
@property
def key_date(self):
return PeriodicDataKeys.KEY_DATE
@property
def key_data(self):
return PeriodicDataKeys.KEY_DATA
@property
def key_manual_data(self):
return PeriodicDataKeys.KEY_MANUAL_DATA
def find_by_id(self, step_id: str):
query = {self.key_step_id: step_id}
record = self.find_one(query)
if not record:
return None
return PeriodicDataSchema(**record)
def find_by_date_and_step(self, _date: str, step_id: str):
query = {self.key_date: _date, self.key_step_id: step_id}
record = self.find_one(query)
if not record:
return PeriodicDataSchema()
return PeriodicDataSchema(**record)
def find_by_latest_data(self, step_id, current_date, default_tags):
granular_tag_query = {f'data.values.{x}': {'$exists': 1} for x in default_tags}
query = PeriodicDataAgg.get_latest_value(step_id, current_date, granular_tag_query)
records = self.aggregate(query)
tag_dict = {}
# TODO: Can this go on aggregation pipeline?
for record in records:
values = record.get("data", {}).get("values", {})
for key, val in values.items():
if key not in tag_dict and key in default_tags:
tag_dict.update({key: val})
return dict(ts=0, values=tag_dict)
def find_mtd(self, step_id, to_date, mtd_keys):
mtd_keys_match = {x: {"$sum": f"$manual_data.{y}"} for x, y in mtd_keys.items()}
query = PeriodicDataAgg.month_to_date(step_id, to_date, mtd_keys_match)
records = self.aggregate(query)
return list(records)
def find_by_date_and_multi_step(self, _date: str, step_id_list: List):
query = {self.key_date: _date, self.key_step_id: {"$in": step_id_list}}
record = self.find(query)
if not record:
return list()
return record
def insert_date(self, _date: date, step_id: str, data):
json_data = {self.key_date: _date, self.key_step_id: step_id, self.key_data: data}
self.insert_one(json_data)
return True
def update_data_with_date(self, _date: str, step_id: str, data):
query = {self.key_date: _date, self.key_step_id: step_id}
self.update_to_set(query, self.key_data, data, upsert=True)
return True
def update_data_with_date_periodic(self, _date: str, step_id: str, manual_data):
query = {self.key_date: _date, self.key_step_id: step_id}
self.update_one(query, data=manual_data, upsert=True)
return True
def save_and_update_periodic_data(self, _date: str, step_id: str, periodic_data):
query = {self.key_date: _date, self.key_step_id: step_id}
existing_record = self.find_by_date_and_step(_date, step_id)
ts_list = list()
if existing_record.data:
for each in existing_record.data:
if "values" in each and each["ts"] == periodic_data.get("ts"):
values_dict = each.get("values", {})
values_dict.update(periodic_data.get("values", {}))
ts_list.append(each["ts"])
if periodic_data.get("ts") not in ts_list:
existing_record.data.append({"ts": periodic_data.get("ts"), "values": periodic_data.get("values")})
periodic_data = existing_record.data
else:
periodic_data = [dict(ts=periodic_data.get("ts"), values=periodic_data.get("values"))]
data = {self.key_date: _date, self.key_step_id: step_id, self.key_data: periodic_data}
self.update_one(query, data, upsert=True)
return True
def save_and_update_data(self, _date: str, step_id: str, data):
manual_data, periodic_data = data.get("manual_data"), data.get("data")
query = {self.key_date: _date, self.key_step_id: step_id}
existing_record = self.find_by_date_and_step(_date, step_id)
ts_list = list()
if existing_record.data:
for each in existing_record.data:
if "values" in each and each["ts"] in periodic_data:
ts = each["ts"]
ts_list.append(ts)
each["values"].update(periodic_data[ts])
for ts, values in periodic_data.items():
if ts not in ts_list:
existing_record.data.append({"ts": ts, "values": values})
periodic_data = existing_record.data
else:
periodic_data = [dict(ts=x, values=y) for x, y in periodic_data.items()]
if not manual_data and not periodic_data:
return True
if manual_data and periodic_data:
existing_record.manual_data.update(manual_data)
data = {self.key_date: _date, self.key_step_id: step_id, self.key_data: periodic_data,
self.key_manual_data: existing_record.manual_data}
elif periodic_data:
data = {self.key_date: _date, self.key_step_id: step_id, self.key_data: periodic_data}
else:
existing_record.manual_data.update(manual_data)
data = {self.key_date: _date, self.key_step_id: step_id, self.key_manual_data: existing_record.manual_data}
self.update_one(query, data, upsert=True)
return True
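    # Worked example for save_and_update_data (values are hypothetical): incoming values are
    # merged into the stored document per timestamp. With existing
    #   data = [{"ts": 1000, "values": {"temp": 20}}]
    # and incoming
    #   {"data": {1000: {"pressure": 5}, 2000: {"temp": 21}}, "manual_data": {"operator": "A"}}
    # the record becomes
    #   data        = [{"ts": 1000, "values": {"temp": 20, "pressure": 5}},
    #                  {"ts": 2000, "values": {"temp": 21}}]
    #   manual_data = {"operator": "A"}   (merged over any existing manual_data)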
    def find_data_with_date(self, _date_query: dict, step_id: str, sort_json=None):
        query = {self.key_date: _date_query, self.key_step_id: step_id}
        record = self.find(query, sort=list((sort_json or {}).items()))
if not record:
return list()
return record
from typing import Optional, List
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import ProjectRemarksKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class ProjectRemarksSchema(MongoBaseSchema):
project_id: Optional[str] = None
remarks: Optional[List] = None
class ProjectRemarks(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.project_remarks)
self.project_id = project_id
@property
def key_project_id(self):
return ProjectRemarksKeys.KEY_PROJECT_ID
@property
def key_remarks(self):
return ProjectRemarksKeys.Key_REMARKS
def find_by_id(self, project_id: str):
query = {self.key_project_id: project_id}
record = self.find_one(query)
if not record:
return None
return record
def update_project_info(self, project_id, data, upsert=False):
query = {self.key_project_id: project_id}
json_data = {self.key_remarks: data}
return self.update_one(data=json_data, query=query, upsert=upsert)
from datetime import date
from typing import Optional, List, Dict
from scripts.constants import StepCategories
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import ReferenceDataKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class ReferenceStepSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
step_id: Optional[str]
properties: Optional[dict] = dict()
data: Optional[Dict] = dict()
class ReferenceStep(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.reference_steps)
self.project_id = project_id
@property
def key_step_id(self):
return ReferenceDataKeys.KEY_STEP_ID
@property
def key_date(self):
return ReferenceDataKeys.KEY_DATE
@property
def key_data(self):
return ReferenceDataKeys.KEY_DATA
@property
def key_property(self):
return ReferenceDataKeys.KEY_PROPERTIES
@property
def key_step_category(self):
return ReferenceDataKeys.KEY_STEP_CATEGORY
@property
def key_entity_name(self):
return ReferenceDataKeys.KEY_ENTITY_NAME
@property
def key_event_id(self):
return ReferenceDataKeys.KEY_EVENT_ID
@property
def key_task_id(self):
return ReferenceDataKeys.KEY_TASK_ID
def find_by_id(self, step_id: str):
query = {self.key_step_id: step_id}
record = self.find_one(query)
if not record:
return None
return ReferenceStepSchema(**record)
def find_by_date_and_step(self, _date: str, step_id: str):
query = {self.key_date: _date, self.key_step_id: step_id}
record = self.find_one(query)
if not record:
return ReferenceStepSchema()
return ReferenceStepSchema(**record)
def find_by_date_and_multi_step(self, _date: str, step_id_list: List, task_id: str = None):
query = {f"{self.key_property}.{self.key_date}": _date, self.key_step_id: {"$in": step_id_list}}
if task_id:
query.update({self.key_task_id: task_id})
records = self.find(query)
if not records:
return dict()
return {step.get("step_id"): step for step in records}
def find_by_multi_step_without_date(self, step_id_list: List, task_id: str = None):
query = {self.key_step_id: {"$in": step_id_list}}
if task_id:
query.update({self.key_task_id: task_id})
records = self.find(query)
if not records:
return dict()
return {step.get("step_id"): step for step in records}
def insert_date(self, _date: date, step_id: str, data):
json_data = {self.key_date: _date, self.key_step_id: step_id, self.key_data: data}
self.insert_one(json_data)
return True
def update_data_with_date(self, _date: str, step_id: str, data, step_category: str, entity_name: str, task_id: str):
if step_category in [StepCategories.PERIODIC, StepCategories.TRIGGER_BASED]:
query = {f"{self.key_property}.{self.key_date}": _date, self.key_step_id: step_id,
self.key_task_id: task_id,
f"{self.key_property}.{self.key_step_category}": step_category, self.key_entity_name: entity_name}
else:
query = {self.key_step_id: step_id, self.key_task_id: task_id,
f"{self.key_property}.{self.key_step_category}": step_category, self.key_entity_name: entity_name}
self.update_one(query, {self.key_data: data}, upsert=True)
return True
def fetch_data_from_query(self, query):
records = self.find(query)
if not records:
return dict()
return {step.get("step_id"): step for step in records}
def find_data_from_query(self, query, sort_json=None, find_one=True):
if sort_json:
records = list(self.find(query, sort=list(sort_json.items())))
else:
records = list(self.find(query))
if not records:
return dict()
if find_one:
return records[0]
return records
def update_data_for_trigger_steps(self, _date: str, step_id: str, data, step_category: str, entity_name: str,
event_id: str):
query = {f"{self.key_property}.{self.key_date}": _date, self.key_step_id: step_id,
f"{self.key_property}.{self.key_event_id}": event_id,
f"{self.key_property}.{self.key_step_category}": step_category, self.key_entity_name: entity_name}
self.update_one(query, {self.key_data: data}, upsert=True)
return True
from typing import Optional, Dict
from scripts.constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import StepDataFileKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class StepDataFilesSchema(MongoBaseSchema):
template_id: Optional[str]
logbook_id: Optional[str]
associated_workflow_version: Optional[int]
file_id: Optional[str]
file_name: Optional[str]
meta: Optional[Dict] = {}
class StepDataFiles(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.step_data_files)
self.project_id = project_id
@property
def key_template_id(self):
return StepDataFileKeys.KEY_TEMPLATE_ID
@property
def key_file_id(self):
return StepDataFileKeys.KEY_FILE_ID
@property
def key_project_id(self):
return StepDataFileKeys.KEY_PROJECT_ID
def update_data_file(self, file_id, project_id, data, upsert=False):
query = {self.key_file_id: file_id, self.key_project_id: project_id}
return self.update_one(data=data, query=query, upsert=upsert)
def get_step_files_data_by_aggregate(self, query: list):
return list(self.aggregate(pipelines=query))
def find_by_id(self, file_id: str):
query = {self.key_file_id: file_id}
record = self.find_one(query)
if not record:
return StepDataFilesSchema(**dict())
return StepDataFilesSchema(**record)
def delete_data_file(self, file_id):
query = {self.key_file_id: file_id}
return self.delete_one(query=query)
from typing import Optional, Dict
from scripts.constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import StepTemplateKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class StepTemplatesSchema(MongoBaseSchema):
template_id: Optional[str]
template_name: Optional[str]
logbook_id: Optional[str]
associated_workflow_version: Optional[int] = 1
description: Optional[str]
meta: Optional[Dict] = {}
file_path: Optional[str]
class StepTemplates(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.step_templates)
self.project_id = project_id
@property
def key_template_id(self):
return StepTemplateKeys.KEY_TEMPLATE_ID
@property
def key_template_name(self):
return StepTemplateKeys.KEY_TEMPLATE_NAME
@property
def key_project_id(self):
return StepTemplateKeys.KEY_PROJECT_ID
def get_template_data_by_aggregate(self, query: list):
return list(self.aggregate(pipelines=query))
def delete_template(self, template_id):
query = {self.key_template_id: template_id}
return self.delete_one(query=query)
def find_template(self, template_name: str):
query = {self.key_template_name: template_name}
record = self.find_one(query)
if not record:
return None
return StepTemplatesSchema(**record)
def find_by_id(self, template_id: str):
query = {self.key_template_id: template_id}
record = self.find_one(query)
if not record:
return StepTemplatesSchema(**dict())
return StepTemplatesSchema(**record)
def update_template_data(self, template_id, project_id, data, upsert=False):
query = {self.key_template_id: template_id, self.key_project_id: project_id}
return self.update_one(data=data, query=query, upsert=upsert)
def get_steps_data_data_by_aggregate(self, query: list):
return list(self.aggregate(pipelines=query))
from typing import Dict, List, Optional
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import StepRecordKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class StepSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
step_id: str
meta: Dict
field_elements: Dict
form_factor: List
description: str
project_id: str
step_sub_category: Optional[str]
step_name: str
step_category: str
display_title: str
event_counter: Optional[int]
replicate_type: Optional[str]
auto_save: Optional[str]
display_properties: Optional[List] = []
class StepCollection(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.steps)
self.project_id = project_id
@property
def key_project_id(self):
return StepRecordKeys.KEY_PROJECT_ID
@property
def key_step_id(self):
return StepRecordKeys.KEY_STEP_ID
@property
def key_step_name(self):
return StepRecordKeys.KEY_STEP_NAME
def insert_one_step(self, data):
"""
        The following function will insert one step into the
        steps collection
:param self:
:param data:
:return:
"""
return self.insert_one(data)
def update_one_step(self, data, upsert=False, **query):
"""
The following function will update one step in
steps collection based on the given query
:param data:
:param upsert:
:param query:
:return:
"""
return self.update_one(data=data, upsert=upsert, query=query)
def delete_one_step(self, **query):
"""
        The following function will delete one step from the
        steps collection based on the given query
:param query:
:return:
"""
return self.delete_one(query=query)
def find_many_by_project_id(self, project_id):
"""
        The following function will return all steps for the
        given project id
:param project_id:
:return:
"""
query = {self.key_project_id: project_id}
many_process = self.find(query=query)
if not many_process:
return list()
return list(many_process)
def fetch_one_step(self, filter_dict=None, **query):
one_step = self.find_one(filter_dict=filter_dict, query=query)
if not one_step:
return None
return one_step
def fetch_step_name(self, step_id):
query = {self.key_step_id: step_id}
one_step = self.find_one(filter_dict={"step_name": 1, "_id": 0}, query=query)
return one_step["step_name"]
def map_positions_for_templates(self, template_list):
query = {self.key_step_id: {"$in": template_list}}
templates = self.find(query, {"step_id": 1, "_id": 0, "menu_placement": 1})
if not templates:
return list()
return [dict(menu_placement=x["menu_placement"], step_id=x["step_id"]) for x in templates]
def get_step_map(self, steps):
query = {self.key_step_id: {"$in": steps}}
steps_data = self.find(query, {"step_id": 1, "menu_placement": 1, "_id": 0})
if not steps_data:
return list()
step_map = {}
for step in steps_data:
step_map.update({step.get("step_id"): step.get("menu_placement")})
return step_map
def find_many(self, step_list):
query = {"step_id": {"$in": step_list}}
steps = self.find(query)
if not steps:
return dict()
return {step["step_id"]: step["field_elements"] for step in steps}
def get_data_by_aggregate(self, query_json: list):
return list(self.aggregate(query_json))
def modify_component_json(self, step_id, component_json, counter=None):
query = {"step_id": step_id}
data = {"field_elements": {"components": component_json}}
if counter:
data.update({"event_counter": counter})
self.update_one(query, data, upsert=False)
def find_by_id(self, step_id: str):
query = {"step_id": step_id}
record = self.find_one(query)
if not record:
return StepSchema(**dict())
return StepSchema(**record)
from typing import Optional, Dict
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import TaskKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class TaskSchema(MongoBaseSchema):
task_info_id: Optional[str]
logbook_id: Optional[str]
project_id: Optional[str]
task_creation_data: Optional[Dict]
meta: Optional[Dict]
reference_id: Optional[str]
status: Optional[str]
task_description: Optional[str]
schedule_meta: Optional[Dict]
previous_stage_details: Optional[Dict] = {}
class TaskCollection(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.tasks)
self.project_id = project_id
@property
def key_project_id(self):
return TaskKeys.KEY_PROJECT_ID
@property
def key_task_id(self):
return TaskKeys.KEY_TASK_ID
def find_by_id(self, project_id: str):
query = {self.key_project_id: project_id}
record = self.find(query)
if not record:
return None
return record
def find_by_task_id(self, task_info_id: str):
query = {self.key_task_id: task_info_id}
record = self.find_one(query)
if not record:
return TaskSchema(**dict())
return TaskSchema(**record)
def update_task(self, task_info_id, data: dict, upsert=False):
query = {self.key_task_id: task_info_id}
return self.update_one(data=data, query=query, upsert=upsert)
def delete_task(self, task_info_id, project_id):
query = {self.key_task_id: task_info_id, self.key_project_id: project_id}
return self.delete_one(query=query)
def get_task_info_data_by_aggregate(self, query: list):
return list(self.aggregate(pipelines=query))
from typing import Optional, Dict, List
from scripts.constants.app_constants import DatabaseNames, CollectionNames, TaskInstanceDataKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class TaskInstanceDataSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
task_id: Optional[str]
stage_id: Optional[str]
step_id: Optional[str]
step_data: Optional[Dict] = dict()
project_id: Optional[str]
remarks: Optional[List] = list()
status: Optional[bool] = False
class TaskInstanceData(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.task_instance_data)
self.project_id = project_id
@property
def key_stage_id(self):
return TaskInstanceDataKeys.KEY_STAGE_ID
@property
def key_step_id(self):
return TaskInstanceDataKeys.KEY_STEP_ID
@property
def key_task_id(self):
return TaskInstanceDataKeys.KEY_TASK_ID
@property
def key_status(self):
return TaskInstanceDataKeys.KEY_STATUS
def find_by_id(self, stage_id: str):
query = {"stage_id": stage_id}
record = self.find_one(query)
if not record:
return None
return TaskInstanceDataSchema(**record)
def find_by_task_id_step_id(self, task_id: str, step_id: str):
query = {"task_id": task_id, "step_id": step_id}
record = self.find_one(query)
if not record:
return None
return TaskInstanceDataSchema(**record)
def update_by_task_step_id(self, task_id: str, step_id: str, data: dict):
query = {"task_id": task_id, "step_id": step_id}
json_data = {"step_data": data}
self.update_one(query, json_data, True)
def update_stage(self, stage_id, data):
query = {"stage_id": stage_id}
json_data = {"step_data": data}
self.update_one(query, json_data, True)
def update_many_stages(self, stage_list, data):
query = {"stage_id": {'$in': stage_list}}
self.update_many(query, data, True)
def update_stage_data(self, stage_id, data):
query = {"stage_id": stage_id}
self.update_one(query, data, True)
def find_data_by_task_id(self, task_id):
query = {"task_id": task_id}
records = self.find(query)
if not records:
return list()
return [dict(stage_id=record["stage_id"], data=record["step_data"]) for record in records]
def get_stage_map_steps(self, stages):
query = {"stage_id": {"$in": stages}}
stage_data = self.find(query)
if not stage_data:
return dict(), list()
stages_map = dict()
steps = list()
for stage in stage_data:
stages_map.update({stage.get("stage_id"): stage.get("step_id")})
steps.append(stage.get("step_id"))
return stages_map, steps
def find_many(self, stages_list):
query = {"stage_id": {"$in": stages_list}}
stages = self.find(query)
if not stages:
return dict(), dict()
stage_data = dict()
step_data = dict()
for stage in stages:
stage_data.update({stage.get("stage_id"): stage.get("step_data")})
step_data.update({stage.get("stage_id"): stage.get("step_id")})
return stage_data, step_data
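    # Note on find_many (ids below are hypothetical): it returns two dicts keyed by stage_id,
    #   stage_data = {"stage_01": {...captured step_data...}}
    #   step_data  = {"stage_01": "step_abc"}
    # so callers can resolve both the recorded data and the owning step for each stage.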
def find_data_for_multiple_stages(self, stages_list: list):
query = {"stage_id": {"$in": stages_list}}
records = self.find(query, sort=[('_id', 1)])
if not records:
return list()
return list(records)
def find_data_with_task_id_step_list(self, task_id, steps_list: list):
query = {self.key_task_id: task_id, self.key_step_id: {'$in': steps_list}}
records = list(self.find(query))
if not records:
return list()
return records
def find_all_data_by_task_id(self, task_id):
query = {"task_id": task_id}
records = self.find(query)
if not records:
return list()
return records
import time
from typing import Optional, Dict, List
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import TaskKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class TaskInstanceSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
task_id: Optional[str]
logbook_id: Optional[str]
associated_workflow_id: Optional[str]
associated_workflow_version: Optional[int]
task_info_id: Optional[str]
current_status: Optional[str]
project_id: Optional[str]
reference_id: Optional[str]
task_creation_data: Optional[Dict] = dict()
meta: Optional[Dict] = {}
current_stage: Optional[str]
stages: List[str] = []
task_category: Optional[str]
task_description: Optional[str]
master_details: Optional[Dict] = {}
task_meta_details: Optional[Dict] = {}
class TaskInstance(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.task_instances)
self.project_id = project_id
@property
def key_task_id(self):
return TaskKeys.KEY_TASK_INSTANCE
@property
def key_task_creation_data(self):
return TaskKeys.KEY_TASK_CREATION_DATA
@property
def key_associated_workflow_id(self):
return TaskKeys.KEY_ASSOCIATED_WORKFLOW_ID
@property
def key_workflow_version(self):
return TaskKeys.KEY_WORKFLOW_VERSION
@property
def key_current_status(self):
return TaskKeys.KEY_CURRENT_STATUS
def find_by_logbooks(self, logbooks_list):
query = {"logbook_id": {"$in": logbooks_list}}
tasks = self.find(query)
if not tasks:
return list()
return tasks
def find_by_task_id(self, task_id: str):
query = {self.key_task_id: task_id}
record = self.find_one(query)
if not record:
return None
return TaskInstanceSchema(**record)
def update_instance_task(self, task_id, data: dict, upsert=False):
query = {self.key_task_id: task_id}
return self.update_one(data=data, query=query, upsert=upsert)
def find_by_workflow(self, workflow_id_list):
query = {self.key_associated_workflow_id: {"$in": workflow_id_list}}
tasks = self.find(query)
if not tasks:
return list()
return tasks
def update_by_workflow_id(self, workflow_id, workflow_version, from_state, to_state):
query = {self.key_associated_workflow_id: workflow_id, self.key_workflow_version: workflow_version,
self.key_current_status: from_state}
data = {self.key_current_status: to_state, "meta.completed_at": int(time.time() * 1000)}
return self.update_one(data=data, query=query, upsert=False)
def update_by_task_id(self, task_id, from_state, to_state):
query = {self.key_task_id: task_id,
self.key_current_status: from_state}
data = {self.key_current_status: to_state, "meta.completed_at": int(time.time() * 1000)}
return self.update_one(data=data, query=query, upsert=False)
def update_task_creation_by_task_id(self, task_id, property_dict):
query = {self.key_task_id: task_id}
# To avoid hierarchy overwrite
task_creation_update = {f"{self.key_task_creation_data}.{x}": y for x, y in property_dict.items()}
return self.update_one(data=task_creation_update, query=query, upsert=False)
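
# --- Hedged usage sketch (added for illustration; not part of the original file) ---
# Shows how TaskInstance.update_task_creation_by_task_id flattens a property dict into
# dotted keys so sibling fields under task_creation_data survive the update.
# The MongoDB URI and the ids below are assumptions, not values from this codebase.
if __name__ == "__main__":
    from pymongo import MongoClient

    client = MongoClient("mongodb://localhost:27017")  # assumed local instance
    task_instances = TaskInstance(client, project_id="project_001")
    # {"operator": "jdoe"} is rewritten to {"task_creation_data.operator": "jdoe"},
    # so only that one nested key is touched by update_one.
    task_instances.update_task_creation_by_task_id("task_001", {"operator": "jdoe"})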
from typing import Dict, Optional, List
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import StepRecordKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class TriggerStepSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
step_id: str
field_elements: Dict
event_counter: Optional[int]
form_info: Optional[Dict]
time_triggered_for: Optional[List] = []
class TriggerStepCollection(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.trigger_steps)
self.project_id = project_id
@property
def key_project_id(self):
return StepRecordKeys.KEY_PROJECT_ID
@property
def key_step_id(self):
return StepRecordKeys.KEY_STEP_ID
@property
def key_step_name(self):
return StepRecordKeys.KEY_STEP_NAME
def insert_one_step(self, data):
"""
        The following function will insert one step record into the
        trigger steps collection
:param self:
:param data:
:return:
"""
return self.insert_one(data)
def update_one_step(self, data, upsert=False, **query):
"""
The following function will update one step in
steps collection based on the given query
:param data:
:param upsert:
:param query:
:return:
"""
return self.update_one(data=data, upsert=upsert, query=query)
def delete_one_step(self, step_id, date):
query = {self.key_step_id: step_id, "date": date}
return self.delete_one(query=query)
def find_many_by_project_id(self, project_id):
"""
        The following function will return all trigger step records for the
        given project
:param project_id:
:return:
"""
query = {self.key_project_id: project_id}
many_process = self.find(query=query)
if not many_process:
return list()
return list(many_process)
def fetch_one_step(self, filter_dict=None, **query):
one_step = self.find_one(filter_dict=filter_dict, query=query)
if not one_step:
return None
return one_step
    def fetch_step_name(self, step_id):
        query = {self.key_step_id: step_id}
        one_step = self.find_one(filter_dict={"step_name": 1, "_id": 0}, query=query)
        if not one_step:
            return None
        return one_step["step_name"]
def map_positions_for_templates(self, template_list):
query = {self.key_step_id: {"$in": template_list}}
templates = self.find(query, {"step_id": 1, "_id": 0, "menu_placement": 1})
if not templates:
return list()
return [dict(menu_placement=x["menu_placement"], step_id=x["step_id"]) for x in templates]
def get_step_map(self, steps):
query = {self.key_step_id: {"$in": steps}}
steps_data = self.find(query, {"step_id": 1, "menu_placement": 1, "_id": 0})
if not steps_data:
return list()
step_map = {}
for step in steps_data:
step_map.update({step.get("step_id"): step.get("menu_placement")})
return step_map
def find_many(self, step_list):
query = {"step_id": {"$in": step_list}}
steps = self.find(query)
if not steps:
return dict()
return {step["step_id"]: step["field_elements"] for step in steps}
def get_data_by_aggregate(self, query_json: list):
return list(self.aggregate(query_json))
def modify_component_json(self, step_id, date, component_json, field_props, time_list, counter=None):
query = {"step_id": step_id, "date": date}
data = {"step_id": step_id,
"date": date,
"time_triggered_for": time_list,
"field_elements": {"components": component_json}}
data.update(field_props)
if counter:
data.update({"event_counter": counter})
self.update_one(query, data, upsert=True)
def delete_many_triggers(self, step_id):
query = {self.key_step_id: step_id}
one_step = self.delete_many(query)
return one_step
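
# --- Hedged usage sketch (illustrative only) ---
# modify_component_json upserts one trigger-step document keyed by (step_id, date) and
# stores the rendered form components under field_elements.components.
# All literal values below are assumptions made for the sketch.
if __name__ == "__main__":
    from pymongo import MongoClient

    trigger_steps = TriggerStepCollection(MongoClient("mongodb://localhost:27017"))
    trigger_steps.modify_component_json(
        step_id="step_001",
        date="2022-01-01",
        component_json=[{"key": "temperature", "type": "number"}],
        field_props={"form_info": {"title": "Shift readings"}},
        time_list=["08:00"],
        counter=1,
    )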
from typing import Optional, Dict
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import TriggerKeys, WorkflowKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class TriggerWorkflowSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
workflow_id: Optional[str]
workflow_version: Optional[int]
trigger_id: Optional[str]
trigger_type: Optional[str]
actions: Optional[list] = []
trigger_meta: Optional[Dict] = {}
class Trigger(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.triggers)
self.project_id = project_id
@property
def key_trigger_id(self):
return TriggerKeys.KEY_TRIGGER_ID
@property
def key_workflow_version(self):
return WorkflowKeys.KEY_WORKFLOW_VERSION
@property
def key_workflow_id(self):
return WorkflowKeys.KEY_WORKFLOW_ID
def update_trigger(self, trigger_id, data: dict, upsert=False):
query = {self.key_trigger_id: trigger_id}
return self.update_one(data=data, query=query, upsert=upsert)
def soft_delete_trigger(self, trigger_id):
json_update = {"is_deleted": True}
query = {self.key_trigger_id: trigger_id}
return self.update_one(data=json_update, query=query)
def find_by_id(self, workflow_id: str, workflow_version: int):
query = {self.key_workflow_id: workflow_id, self.key_workflow_version: workflow_version}
record = self.find_one(query)
if not record:
return TriggerWorkflowSchema(**dict())
return TriggerWorkflowSchema(**record)
def fetch_by_id(self, **query):
if 'role' in query:
query["trigger_meta.role"] = query['role']
query.pop('role')
if 'on_click' in query:
query["trigger_meta.on_click"] = query['on_click']
query.pop('on_click')
record = self.find_one(query)
if not record:
return TriggerWorkflowSchema(**dict())
return TriggerWorkflowSchema(**record)
def find_all_triggers(self, workflow_id: Optional[str], workflow_version: Optional[str]):
query = {self.key_workflow_id: workflow_id, self.key_workflow_version: workflow_version,
"is_deleted": False} if workflow_id and workflow_version else {}
records = self.find(query=query)
if not records:
return list()
return list(records)
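
# --- Hedged usage sketch (illustrative only) ---
# fetch_by_id remaps the convenience kwargs 'role' and 'on_click' onto the nested
# trigger_meta.* fields before querying; any other kwargs are passed through untouched.
# The connection details and ids below are assumptions.
if __name__ == "__main__":
    from pymongo import MongoClient

    triggers = Trigger(MongoClient("mongodb://localhost:27017"))
    # Internally becomes {"workflow_id": "wf_001", "trigger_meta.role": "operator"}.
    trigger = triggers.fetch_by_id(workflow_id="wf_001", role="operator")
    print(trigger.trigger_id)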
from typing import Optional, Dict, List
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import WorkflowKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class WorkflowSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
workflow_id: Optional[str]
workflow_name: Optional[str]
tags: Optional[list] = []
description: Optional[str]
meta: Optional[Dict]
steps: Optional[List] = []
roles: Optional[List] = []
permissions: Optional[List] = []
project_id: Optional[str]
is_deleted: bool = False
workflow_version: Optional[str]
validation: Optional[Dict] = {}
class Workflow(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.workflows)
self.project_id = project_id
@property
def key_workflow_id(self):
return WorkflowKeys.KEY_WORKFLOW_ID
@property
def key_project_id(self):
return WorkflowKeys.KEY_PROJECT_ID
@property
def key_workflow_version(self):
return WorkflowKeys.KEY_WORKFLOW_VERSION
@property
def key_workflow_name(self):
return WorkflowKeys.KEY_WORKFLOW_NAME
def find_by_id(self, workflow_id: str, workflow_version):
query = {self.key_workflow_id: workflow_id, self.key_workflow_version: workflow_version}
record = self.find_one(query)
if not record:
return WorkflowSchema(**dict())
return WorkflowSchema(**record)
def find_name_by_id(self, workflow_id: str):
query = {self.key_workflow_id: workflow_id}
filter_dict = {self.key_workflow_name: 1, "_id": 0}
record = self.find_one(query, filter_dict)
if not record:
return None
return record[self.key_workflow_name]
def fetch_workflow_record(self, **query):
record = self.find_one(query)
if not record:
return None
return record
def add_new_workflow(self, data: dict):
data = WorkflowSchema(**data).dict()
self.insert_one(data)
return WorkflowSchema(**data).workflow_id
def update_workflow(self, workflow_id, data: dict, upsert=False):
query = {self.key_workflow_id: workflow_id}
return self.update_one(data=data, query=query, upsert=upsert)
def soft_delete_workflow(self, workflow_id, workflow_version):
json_update = {"is_deleted": True}
query = {self.key_workflow_id: workflow_id, self.key_workflow_version: workflow_version}
return self.update_one(data=json_update, query=query)
def find_all_workflows(self, project_id: Optional[str]):
query = {self.key_project_id: project_id} if project_id else {}
records = self.find(query)
if not records:
return list()
return records
def find_all_undeleted_workflows(self, project_id: Optional[str]):
query = {self.key_project_id: project_id, "is_deleted": False} if project_id else {"is_deleted": False}
records = self.find(query)
if not records:
return list()
return records
def get_data_by_aggregate(self, query_json: list):
response = list(self.aggregate(query_json))
return response
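
# --- Hedged usage sketch (illustrative only) ---
# add_new_workflow validates the payload through WorkflowSchema before inserting and
# returns the workflow_id carried in the payload. Field values below are assumptions.
if __name__ == "__main__":
    from pymongo import MongoClient

    workflows = Workflow(MongoClient("mongodb://localhost:27017"))
    new_id = workflows.add_new_workflow({
        "workflow_id": "wf_001",
        "workflow_name": "Incident review",
        "project_id": "project_001",
        "workflow_version": "1",
        "steps": [],
    })
    print(new_id)  # -> "wf_001"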
from typing import Optional
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import WorkflowKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class PermissionsWorkflowSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
workflow_id: Optional[str]
workflow_version: Optional[str]
workflow_status: Optional[str]
user_role: Optional[str]
step_id: Optional[str]
permissions: Optional[list] = []
sequence_no: Optional[int]
class Permissions(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_assistant,
collection=CollectionNames.workflow_permissions)
self.project_id = project_id
@property
def key_workflow_id(self):
return WorkflowKeys.KEY_WORKFLOW_ID
@property
def key_workflow_version(self):
return WorkflowKeys.KEY_WORKFLOW_VERSION
def update_permission(self, workflow_id, data: dict, upsert=False):
query = {self.key_workflow_id: workflow_id}
return self.update_one(data=data, query=query, upsert=upsert)
def add_new_permission(self, data: dict):
data = PermissionsWorkflowSchema(**data).dict()
self.insert_one(data)
return PermissionsWorkflowSchema(**data).workflow_id
def soft_delete_permission(self, workflow_id):
json_update = {"is_deleted": True}
query = {self.key_workflow_id: workflow_id}
return self.update_one(data=json_update, query=query)
def find_by_id(self, workflow_id: str):
query = {self.key_workflow_id: workflow_id}
record = self.find_one(query)
if not record:
return None
return PermissionsWorkflowSchema(**record)
def find_all_permissions(self, workflow_id, workflow_version):
query = {self.key_workflow_id: workflow_id,
self.key_workflow_version: workflow_version}
records = self.find(query=query)
if not records:
return None
return records
def get_data_by_aggregate(self, query_json: list):
return list(self.aggregate(query_json))
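
# --- Hedged usage sketch (illustrative only) ---
# find_all_permissions returns the raw cursor of permission documents for one workflow
# version, or None when nothing matches. The ids below are assumptions.
if __name__ == "__main__":
    from pymongo import MongoClient

    permissions = Permissions(MongoClient("mongodb://localhost:27017"))
    records = permissions.find_all_permissions("wf_001", "1")
    for record in records or []:
        print(record.get("user_role"), record.get("permissions"))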
class ConfigAggregate:
@staticmethod
def get_project_template(project_id: str):
query_json = [
{
'$match': {
'customer_project_id': project_id
}
}, {
'$addFields': {
'site_template': 'site_template'
}
}, {
'$lookup': {
'from': 'constants',
'localField': 'site_template',
'foreignField': 'type',
'as': 'data'
}
}, {
'$unwind': {
'path': '$data'
}
}, {
'$unwind': {
'path': '$data.data'
}
}, {
'$group': {
'_id': None,
'data': {
'$push': {
'$cond': {
'if': {
'$eq': [
'$site_templt_id', '$data.data.site_templt_id'
]
},
'then': '$data.data.key_list',
'else': '$$REMOVE'
}
}
}
}
}, {
'$project': {
'_id': 0,
'data': {
'$ifNull': [
{
'$arrayElemAt': [
'$data', 0
]
}, False
]
}
}
}
]
return query_json
@staticmethod
def site_hierarchy(project_id: str):
query_json = [
{'$match': {
'customer_project_id': project_id
}},
{"$project":
{
"_id": 0,
"label": "$site_name",
"value": "$site_id",
"hierarchy_id": "$site_id"
}}
]
return query_json
@staticmethod
    def get_site_hierarchy(project_id: str, path: str, label_query: list, value_query: list,
                           add_fields=None):
query_json = [
{
'$match': {
'customer_project_id': project_id
}
}, {
'$unwind': {
'path': f'${path}'
}
},
{
'$addFields': add_fields
},
{
'$group': {
'_id': {
'label': {
'$concat': label_query
},
'value': {
'$concat': value_query
}
}
}
},
{
'$replaceRoot': {
'newRoot': '$_id'
}
}
]
if not bool(add_fields):
query_json.pop(2)
return query_json
@staticmethod
def fetch_level_one_hierarchy_details(site_id: str, hierarchy_type, fun_type=False):
query_json = [
{'$match': {'site_id': site_id}}, {
'$unwind': {
'path': f'${hierarchy_type}'
}
}, {
'$group': {
"_id": None,
'data': {"$push": {
'label': f'${hierarchy_type}.{hierarchy_type}_name',
"hierarchy_id": f'${hierarchy_type}.{hierarchy_type}_id',
'value': {
'$concat': ["$site_id", {"$literal": "$"}, f'${hierarchy_type}.{hierarchy_type}_id']}
}}
}}]
if fun_type:
query_json.pop()
query_json.append({
'$group': {
"_id": None,
'data': {"$push": {
'k': {'$ifNull': [f'${hierarchy_type}.{hierarchy_type}_name', ""]},
'v': {'$ifNull': [{
'$concat': ["$site_id", {"$literal": "$"}, f'${hierarchy_type}.{hierarchy_type}_id']}, ""]
}}
}}})
query_json.append({
'$replaceRoot': {
'newRoot': {
'$arrayToObject': '$data'
}
}
})
return query_json
@staticmethod
def fetch_level_two_hierarchy_details(site_id: str, hierarchy_id: str, hierarchy_type: str, concat_list: list,
fun_type=False):
query_hierarchy_type = hierarchy_id.split("_")[0]
query_json = [
{
'$match': {
'site_id': site_id
}
}, {
'$unwind': {
'path': f'${hierarchy_type}'
}
}, {
'$group': {
'_id': None,
'data': {
'$push': {
'$cond': [
{
'$eq': [
hierarchy_id, f'${hierarchy_type}.{query_hierarchy_type}_id'
]
}, {
'label': f'${hierarchy_type}.{hierarchy_type}_name',
'hierarchy_id': f'${hierarchy_type}.{hierarchy_type}_id',
'value': {"$concat": concat_list}
}, '$$REMOVE'
]
}
}
}
}]
if fun_type:
query_json.pop()
query_json.append({
'$group': {
'_id': None,
'data': {
'$push': {
'$cond': [
{
'$eq': [
hierarchy_id, f'${hierarchy_type}.{query_hierarchy_type}_id'
]
}, {
'k': {'$ifNull': [f'${hierarchy_type}.{hierarchy_type}_name', ""]},
'v': {"$concat": concat_list}
}, '$$REMOVE'
]
}
}
}
})
query_json.append({
'$replaceRoot': {
'newRoot': {
'$arrayToObject': '$data'
}
}})
return query_json
@staticmethod
def list_logbook_info():
query_json = [
{
'$match': {}
},
{
'$project': {
'_id': 0,
'project_id': '$project_id',
'logbook_id': '$logbook_id',
'created_on': '$meta.created_at',
'created_by': '$meta.created_by',
'logbook_name': '$logbook_name',
'logbook_description': '$logbook_description',
'logbook_type': '$logbook_type',
'workflow_spec_id': "$workflow_spec_id",
'hierarchy': '$hierarchy'
}
}
]
return query_json
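
# --- Hedged usage sketch (illustrative only) ---
# ConfigAggregate only builds aggregation pipelines (lists of stage dicts); the caller
# feeds them to a collection's aggregate helper. With add_fields left empty,
# get_site_hierarchy drops the $addFields stage, leaving
# $match -> $unwind -> $group -> $replaceRoot. Argument values below are assumptions.
if __name__ == "__main__":
    pipeline = ConfigAggregate.get_site_hierarchy(
        project_id="project_001",
        path="dept",
        label_query=["$dept.dept_name"],
        value_query=["$site_id", {"$literal": "$"}, "$dept.dept_id"],
    )
    print(len(pipeline))  # 4 stages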
class IlensConfigMobileMongoQueries:
@staticmethod
def site_hierarchy(project_id: str):
query_json = [
{'$match': {
'customer_project_id': project_id
}},
{"$project":
{
"_id": 0,
"value": "$site_name",
"id": "$site_id"
}}
]
return query_json
@staticmethod
def fetch_level_one_hierarchy_details(site_id: str, hierarchy_type):
query_json = [
{'$match': {'site_id': site_id}}, {
'$unwind': {
'path': f'${hierarchy_type}'
}
}, {
'$group': {
"_id": None,
'data': {"$push": {
'value': f'${hierarchy_type}.{hierarchy_type}_name',
'id': {
'$concat': ["$site_id", {"$literal": "$"}, f'${hierarchy_type}.{hierarchy_type}_id']}
}}
}}]
return query_json
@staticmethod
def fetch_level_two_hierarchy_details(site_id: str, hierarchy_id: str, hierarchy_type: str, concat_list: list):
query_hierarchy_type = hierarchy_id.split("_")[0]
query_json = [
{
'$match': {
'site_id': site_id
}
}, {
'$unwind': {
'path': f'${hierarchy_type}'
}
}, {
'$group': {
'_id': None,
'data': {
'$push': {
'$cond': [
{
'$eq': [
hierarchy_id, f'${hierarchy_type}.{query_hierarchy_type}_id'
]
}, {
'value': f'${hierarchy_type}.{hierarchy_type}_name',
'id': {"$concat": concat_list}
}, '$$REMOVE'
]
}
}
}
}]
return query_json
class SiteConfAggregate:
@staticmethod
def get_site_list_key_value(project_id: str):
query_json = [
{
'$match': {
'customer_project_id': project_id
}
}, {
'$group': {
'_id': None,
'data': {
'$push': {
'k': '$site_id',
'v': '$$ROOT'
}
}
}
}, {
'$replaceRoot': {
'newRoot': {
'$arrayToObject': '$data'
}
}
}
]
return query_json
class UserRoleAggregate:
@staticmethod
def get_user_roles_list(project_id: str):
query_json = [{
"$match": {
"project_id": project_id
}}, {'$group': {'_id': None, 'data': {
'$push': {'k': {'$ifNull': ['$user_role_id', '']}, 'v': {'$ifNull': ['$user_role_name', '']}}}}},
{'$replaceRoot': {'newRoot': {'$arrayToObject': '$data'}}}]
return query_json
@staticmethod
def user_roles_list_key_value_pairs(project_id: str, user_roles: list = None):
query_dict = dict()
query_dict.update(project_id=project_id)
if bool(user_roles):
query_dict.update({
"user_role_id": {"$in": user_roles}
})
query_json = [{
"$match": query_dict}, {'$project': {'_id': 0,
'value': {'$ifNull': ['$user_role_id', '']},
'label': {'$ifNull': ['$user_role_name', '']}}}]
return query_json
class UsersAggregate:
@staticmethod
def get_users_list(project_id=None):
query_json = [{'$group': {'_id': None, 'data': {
'$push': {'k': {'$ifNull': ['$user_id', '']}, 'v': {'$ifNull': ['$username', '']}}}}},
{'$replaceRoot': {'newRoot': {'$arrayToObject': '$data'}}}]
if bool(project_id):
query_json.insert(0, {"$match": {"project_id": project_id}})
return query_json
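
# --- Hedged usage sketch (illustrative only) ---
# get_users_list builds a pipeline that collapses the users collection into a single
# {user_id: username} document; passing a project_id prepends a $match stage.
if __name__ == "__main__":
    pipeline = UsersAggregate.get_users_list(project_id="project_001")
    # -> ['$match', '$group', '$replaceRoot']
    print([list(stage)[0] for stage in pipeline])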
from typing import Any, Optional
from scripts.constants import CommonKeys
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class ConstantsSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
    type: Optional[str]
data: Any
tableData: Optional[Any]
class Constants(MongoCollectionBaseClass):
def __init__(self, mongo_client):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration,
collection=CollectionNames.constants)
@property
def key_type(self):
return CommonKeys.KEY_TYPE
def find_constant_by_dict(self, _type):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param _type:
:return:
"""
record = self.find_one(query={self.key_type: _type})
if not record:
            return dict()
return dict(record)
def find_constant(self, _type, filter_dict=None):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param _type:
:param filter_dict:
:return:
"""
query = {self.key_type: _type}
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return ConstantsSchema()
return ConstantsSchema(**record)
def find_constant_dict(self, _type, filter_dict=None):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param _type:
:param filter_dict:
:return:
"""
query = {self.key_type: _type}
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return dict()
return dict(record)
def insert_one_constant(self, data):
"""
        The following function will insert one constant record into the
        constants collection
:param self:
:param data:
:return:
"""
return self.insert_one(data)
def find_constant_by_content(self, content_type):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
"""
query = {"content_type": content_type}
search_option = {"data": 1}
record = self.find_one(query=query, filter_dict=search_option)
if not record:
return dict()
return record
from typing import Optional, Dict
from scripts.constants.app_constants import CustomerProjectKeys
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class CustomerProjectsSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
customer_project_name: Optional[str]
description: Optional[str]
site_templt_id: Optional[str]
logo_name: Optional[str]
logo_url: Optional[str]
process_templt_id: Optional[str]
update_details: Optional[Dict]
user_id: Optional[str]
customer_project_id: Optional[str]
product_encrypted: Optional[bool]
class CustomerProjects(MongoCollectionBaseClass):
def __init__(self, mongo_client):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration,
collection=CollectionNames.customer_projects)
@property
def key_customer_project_id(self):
return CustomerProjectKeys.KEY_CUSTOMER_PROJECT_ID
@property
def key_customer_project_name(self):
return CustomerProjectKeys.KEY_CUSTOMER_PROJECT_NAME
def find_project(self, project_id=None, project_name=None, filter_dict=None):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
        :param filter_dict:
        :param project_id:
        :param project_name:
        :return:
"""
query = dict()
if project_id:
query.update({self.key_customer_project_id: project_id})
if project_name:
query.update({self.key_customer_project_name: project_name})
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return dict()
return CustomerProjectsSchema(**record).dict()
def find_project_by_query(self, query, filter_dict=None):
record = self.find(query=query, filter_dict=filter_dict)
if record:
return record
return list()
def insert_one_project(self, data):
"""
        The following function will insert one project into the
        customer_projects collection
:param self:
:param data:
:return:
"""
return self.insert_one(data)
def update_one_project(self, project_id, data):
query = {self.key_customer_project_id: project_id}
return self.update_one(query=query, data=data)
def delete_one_project(self, project_id):
if project_id:
query = {self.key_customer_project_id: project_id}
return self.delete_one(query)
else:
return False
def get_project_data_by_aggregate(self, query: list):
return list(self.aggregate(pipelines=query))
from typing import Any, Optional, List
from scripts.constants import CommonKeys
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class LookupTableSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
lookup_id: Optional[str]
lookup_name: Optional[str]
lookup_data: Optional[List]
project_id: Optional[Any]
description: Optional[str]
class LookupTable(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration,
collection=CollectionNames.lookup_table)
self.project_id = project_id
@property
def key_type(self):
return CommonKeys.KEY_LOOKUP
def find_constant_by_dict(self, _type):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param _type:
:return:
"""
record = self.find_one(query={self.key_type: _type})
if not record:
            return dict()
return dict(record)
def find_lookup_dict(self, lookup_name, filter_dict=None):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
        :param lookup_name:
:param filter_dict:
:return:
"""
query = {self.key_type: lookup_name}
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return dict()
return dict(record)
def map_lookup_keys(self, type):
query = {self.key_type: type}
_record = self.find_one(query=query)
if not _record:
return dict()
return {record["lookupdata_id"]: record["lookup_value"] for record in _record["lookup_data"]}
def insert_one_constant(self, data):
"""
        The following function will insert one lookup record into the
        lookup table collection
:param self:
:param data:
:return:
"""
return self.insert_one(data)
def find_constant_by_content(self, content_type):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
"""
query = {"content_type": content_type}
search_option = {"data": 1}
record = self.find_one(query=query, filter_dict=search_option)
if not record:
return dict()
return record
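
# --- Hedged usage sketch (illustrative only) ---
# map_lookup_keys flattens one lookup document into a {lookupdata_id: lookup_value}
# dict, convenient for resolving ids in form data. The connection details and the
# lookup name below are assumptions.
if __name__ == "__main__":
    from pymongo import MongoClient

    lookup_table = LookupTable(MongoClient("mongodb://localhost:27017"))
    loss_reasons = lookup_table.map_lookup_keys("loss_reasons")
    print(loss_reasons)  # e.g. {"lkd_101": "Planned maintenance"}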
from typing import Optional, Union, List, Any, Dict
from pydantic import BaseModel
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.utils.mongo_util import MongoCollectionBaseClass
class RuleTargets(MongoCollectionBaseClass):
def __init__(self, mongo_client):
super().__init__(
mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.rule_targets
)
from typing import List, Optional
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class ShiftsCollectionKeys:
KEY_PROJECT_ID = "project_id"
KEY_SHIFT_NAME = "shift_name"
KEY_SHIFT_ID = "shift_id"
KEY_ACTIVITY_NAME = "activity_name"
KEY_SHIFT_START_TIME = "shift_start_time"
KEY_SHIFT_START = "shift_start"
KEY_SHIFT_END_TIME = "shift_end_time"
KEY_SHIFT_END = "shift_end"
KEY_SHIFT_DESCRIPTION = "shift_description"
KEY_ACTIVITIES = "activities"
KEY_PRODUCT_ENCRYPTED = "product_encrypted"
class ShiftsSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
project_id: Optional[str]
shift_name: Optional[str]
shift_id: Optional[str]
shift_description: Optional[str]
activities: Optional[List]
product_encrypted: Optional[bool]
class Shifts(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration,
collection=CollectionNames.shifts)
self.project_id = project_id
@property
def key_project_id(self):
return ShiftsCollectionKeys.KEY_PROJECT_ID
@property
def key_shift_name(self):
return ShiftsCollectionKeys.KEY_SHIFT_NAME
@property
def key_shift_id(self):
return ShiftsCollectionKeys.KEY_SHIFT_ID
@property
def key_shift_description(self):
return ShiftsCollectionKeys.KEY_SHIFT_DESCRIPTION
@property
def key_activities(self):
return ShiftsCollectionKeys.KEY_ACTIVITIES
@property
def key_product_encrypted(self):
return ShiftsCollectionKeys.KEY_PRODUCT_ENCRYPTED
def find_shifts_by_project_id(self, project_id):
query = {self.key_project_id: project_id}
all_shifts = self.find(query=query)
if not all_shifts:
return list()
return list(all_shifts)
from typing import Optional, Dict, List
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.app_constants import SiteConfCollectionKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.common_utils import CommonUtils
from scripts.utils.mongo_util import MongoCollectionBaseClass
class SiteConfSchema(MongoBaseSchema):
site_name: Optional[str]
site_info: Optional[Dict]
customer_project_id: Optional[str]
site_id: Optional[str]
product_encrypted: Optional[bool]
dept: Optional[List]
line: Optional[List]
equipment: Optional[List]
class SiteConf(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration,
collection=CollectionNames.site_conf)
self.com_utils = CommonUtils(project_id=project_id)
self.project_id = project_id
@property
def key_customer_project_id(self):
return SiteConfCollectionKeys.KEY_CUSTOMER_PROJECT_ID
@property
def key_site_id(self):
return SiteConfCollectionKeys.KEY_SITE_ID
@property
def key_site_name(self):
return SiteConfCollectionKeys.KEY_SITE_NAME
@property
def key_process_id(self):
return SiteConfCollectionKeys.KEY_PROCESS_ID
def get_all_sites(self, filter_dict=None,
sort=None, skip=0, limit=None, site_id=None, **query):
"""
The following function will give all sites for the given set of
search parameters as keyword arguments
:param filter_dict:
:param sort:
:param skip:
:param limit:
        :param site_id:
        :param query:
:return:
"""
if site_id is not None:
query.update({self.key_site_id: site_id})
sites = self.find(filter_dict=filter_dict, sort=sort, skip=skip, limit=limit, query=query)
if sites:
return list(sites)
return list()
def find_site_by_site_name(self, site_name, project_id):
query_json = {self.key_site_name: site_name, self.key_customer_project_id: project_id}
response = self.find_one(query=query_json)
if response:
return dict(response)
else:
return dict()
    def find_site_by_site_id(self, site_id, filter_dict=None, find_condition=None):
        if find_condition:
            query = find_condition
        else:
            query = {self.key_site_id: site_id}
site = self.find_one(query=query, filter_dict=filter_dict)
if site:
return dict(site)
return dict()
def find_site_by_query(self, query):
site = self.find(query=query)
if site:
return site
return list()
def find_sites_by_project(self, project_id):
sites = self.find(query={self.key_customer_project_id: project_id})
if sites:
return list(sites)
return list()
def find_accessible_sites(self, project_id, accessible_sites):
query_dict = {self.key_customer_project_id: project_id, self.key_site_id: {"$in": accessible_sites}}
sites = self.find(query=query_dict)
if sites:
return list(sites)
return list()
def delete_one_site(self, site_id):
if site_id:
query = {self.key_site_id: site_id}
return self.delete_one(query=query)
else:
return False
def delete_many_site(self, query=None):
return self.delete_many(query)
def update_one_site(self, site_id, data, upsert=False):
query = {self.key_site_id: site_id}
return self.update_one(data=data, query=query, upsert=upsert)
def update_one_process(self, process_id, updated_data):
"""
The following function will update one tag in
tags collection based on the given query
"""
query_dict = {self.key_process_id: process_id}
return self.update_one(data=updated_data, query=query_dict)
def update_many_site(self, site_id, data):
query = {self.key_site_id: site_id}
response = self.update_many(query=query, data=data)
if response:
return list(response)
else:
return list()
def insert_one_site(self, data):
response = self.insert_one(data=data)
return response
def insert_many_sites(self, data):
response = self.insert_many(data=data)
if response:
return list(response)
else:
return list()
def distinct_site_by_key(self, query_key, project_id):
filter_dict = {self.key_customer_project_id: project_id}
response = self.distinct(query_key, filter_dict)
if not response:
return list()
return response
def get_site_data_by_aggregate(self, query: list):
return list(self.aggregate(pipelines=query))
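
# --- Hedged usage sketch (illustrative only) ---
# get_all_sites accepts extra filters as keyword arguments and can narrow to a single
# site_id; it always returns a list. Connection details and values below are assumptions.
if __name__ == "__main__":
    from pymongo import MongoClient

    site_conf = SiteConf(MongoClient("mongodb://localhost:27017"), project_id="project_001")
    sites = site_conf.get_all_sites(
        filter_dict={"_id": 0, "site_id": 1, "site_name": 1},
        customer_project_id="project_001",
    )
    print(len(sites))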
from typing import Optional, List
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.constants.db_keys import TagKeys
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class TagSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
tag_name: Optional[str]
unit: Optional[str]
tag_type: Optional[str]
description: Optional[str]
id: Optional[str]
tag_group_id: Optional[str]
data_type: Optional[float]
default: Optional[bool]
system_tag_type: Optional[str]
tag_category_id: Optional[str]
value_list: Optional[List]
product_encrypted: Optional[bool]
class Tag(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration,
collection=CollectionNames.tags)
self.project_id = project_id
@property
def key_tag_id(self):
return TagKeys.KEY_TAG_ID
@property
def key_tag_name(self):
return TagKeys.KEY_TAG_NAME
def find_name_by_id(self, tag_id: str):
query = {self.key_tag_id: tag_id}
filter_dict = {self.key_tag_name: 1, "_id": 0}
record = self.find_one(query, filter_dict)
if not record:
return None
return record[self.key_tag_name]
from typing import Optional
from scripts.constants import UniqueIdKeys
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class UniqueIdSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
key: Optional[str]
id: Optional[str]
product_encrypted: Optional[bool] = False
class UniqueId(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration,
collection=CollectionNames.unique_id)
self.project_id = project_id
@property
def key_key(self):
return UniqueIdKeys.KEY_KEY
def find_one_record(self, **kwargs):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param kwargs:
:return:
"""
record = self.find_one(query=kwargs)
if not record:
return UniqueIdSchema()
return UniqueIdSchema(**record)
def insert_record(self, record: UniqueIdSchema):
"""
        The following function will insert the given unique-id record
        into the collection
:param record:
:return:
"""
self.insert_one(record.dict())
return record.id
def update_record(self, record: UniqueIdSchema):
"""
        The following function will update the unique-id record that
        matches the given record's key
:param record:
:return:
"""
self.update_one(query={self.key_key: record.key}, data=record.dict())
return record.id
def update_one_record(self, key, updated_data):
query = {self.key_key: key}
return self.update_one(data=updated_data, query=query)
def find_record(self, key, filter_dict=None):
query = {self.key_key: key}
unique_key = self.find_one(query=query, filter_dict=filter_dict)
if unique_key:
return dict(unique_key)
else:
return unique_key
from typing import Optional, Dict, List
from scripts.constants import UserCollectionKeys
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class UserSchema(MongoBaseSchema):
name: Optional[str]
project_id: Optional[str]
username: Optional[str]
password: Optional[str]
email: Optional[str]
phonenumber: Optional[Dict]
userrole: Optional[List[str]]
user_type: Optional[str]
user_id: Optional[str]
AccessLevel: Optional[Dict]
user_access_select_all: Optional[bool]
access_group_ids: Optional[List[str]]
client_id: Optional[str]
created_by: Optional[str]
hmi: Optional[Dict]
encryption_salt: Optional[Dict]
product_encrypted: Optional[bool]
email_preferences: Optional[Dict]
language: Optional[str]
passwordReset: Optional[Dict]
failed_attempts: Optional[int]
is_user_locked: Optional[bool]
last_failed_attempt: Optional[str]
profileImage_name: Optional[str]
profileImage_url: Optional[str]
date_format: Optional[str]
date_time_format: Optional[str]
time_format: Optional[str]
tz: Optional[str]
app_url: Optional[str]
landing_page: Optional[str]
ilens_encrypted: Optional[bool]
class User(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration,
collection=CollectionNames.user)
self.project_id = project_id
@property
def key_username(self):
return UserCollectionKeys.KEY_USERNAME
@property
def key_user_id(self):
return UserCollectionKeys.KEY_USER_ID
@property
def key_language(self):
return UserCollectionKeys.KEY_LANGUAGE
@property
def key_name(self):
return UserCollectionKeys.KEY_NAME
@property
def key_project_id(self):
return UserCollectionKeys.KEY_PROJECT_ID
@property
def key_userrole(self):
return UserCollectionKeys.KEY_USER_ROLE
def get_all_users(self, filter_dict=None,
sort=None, skip=0, limit=None, **query):
users = self.find(filter_dict=filter_dict, sort=sort, skip=skip, limit=limit, query=query)
if users:
return list(users)
return list()
def find_user(self, user_id):
user = self.find_one(query={self.key_user_id: user_id})
if user:
return dict(user)
return user
def find_user_by_param(self, **query):
user = self.find_one(query)
if user:
return UserSchema(**user)
return user
def find_access_site(self, user_id):
access_site = self.distinct(query_key="AccessLevel.sites.parent_id", filter_json=dict(user_id=user_id))
if access_site:
return list(access_site)
return list()
def find_by_query_key(self, query_key, user_id):
access_site = self.distinct(query_key=query_key, filter_json={self.key_user_id: user_id})
if access_site:
return access_site
return list()
def update_one_user(self, user_id, data):
query = {self.key_user_id: user_id}
response = self.update_one(query=query, data=data)
return response
def delete_one_user(self, user_id):
return self.delete_one(query={self.key_user_id: user_id})
def users_by_project_and_site(self, user_id, project_id, site_ids):
query = {"$or": [{self.key_user_id: user_id}, {"AccessLevel.sites.parent_id": {"$in": site_ids}}]}
if project_id is not None:
query["$or"].append({self.key_project_id: project_id})
response = self.find(query)
if not response:
return list()
return response
def find_user_time_zone(self, user_id):
search_json = {
"_id": 0,
"tz": 1,
"date_format": 1,
"time_format": 1,
"date_time_format": 1
}
response = self.find_one(filter_dict=search_json, query={self.key_user_id: user_id})
return response
# def find_decrypted_user(self, user_id):
# user = self.find_decrypted(query={self.key_user_id: user_id})
# if user:
# return user
# return user
def distinct_user(self, query_key, filter_json):
query = {self.key_user_id: filter_json}
return self.distinct(query_key=query_key, filter_json=query)
def users_list_by_aggregate(self, query: list):
return self.aggregate(pipelines=query)
def find_user_by_project_id(self, user_id, project_id):
user = self.find_one(query={self.key_user_id: user_id, self.key_project_id: project_id})
if user:
return dict(user)
return user
def find_user_data_with_roles(self, roles, project_id):
query = {self.key_userrole: {'$in': roles}, self.key_project_id: project_id}
response = list(self.find(query))
if response:
return response
return list()
from scripts.constants import DatabaseNames, CollectionNames, UserCollectionKeys
from scripts.utils.mongo_util import MongoCollectionBaseClass
class UserProject(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration,
collection=CollectionNames.user_project)
self.project_id = project_id
@property
def key_username(self):
return UserCollectionKeys.KEY_USERNAME
@property
def key_user_id(self):
return UserCollectionKeys.KEY_USER_ID
@property
def key_language(self):
return UserCollectionKeys.KEY_LANGUAGE
@property
def key_name(self):
return UserCollectionKeys.KEY_NAME
@property
def key_email(self):
return UserCollectionKeys.KEY_EMAIL
@property
def key_project_id(self):
return UserCollectionKeys.KEY_PROJECT_ID
def fetch_user_project(self, user_id, project_id):
query = {self.key_user_id: user_id, self.key_project_id: project_id}
user = self.find_one(query=query)
return user
def list_projects(self, user_id):
query = {self.key_user_id: user_id}
return self.distinct(query_key=self.key_project_id, filter_json=query)
def fetch_users(self, project_id):
query = {self.key_project_id: project_id}
return self.find(query=query)
def insert_one_user(self, data):
"""
        The following function will insert one user-project mapping into the
        user_project collection
:param self:
:param data:
:return:
"""
return self.insert_one(data)
def update_one_user_project(self, data, user_id, project_id):
query = {self.key_user_id: user_id, self.key_project_id: project_id}
response = self.update_one(query=query, data=data)
return response
def delete_user(self, user_id):
if user_id:
return self.delete_many(query={self.key_user_id: user_id})
else:
return False
def delete_user_project(self, user_id, project_id):
if user_id:
return self.delete_one(query={self.key_user_id: user_id, self.key_project_id: project_id})
else:
return False
from typing import Optional, Dict
from scripts.constants.app_constants import DatabaseNames, CollectionNames
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class UserRoleSchema(MongoBaseSchema):
type: Optional[str]
user_role_name: Optional[str]
user_role_description: Optional[str]
user_role_id: Optional[str]
project_id: Optional[str]
user_role_permissions: Optional[Dict]
access_levels: Optional[Dict]
default: Optional[bool]
client_id: Optional[str]
product_encrypted: Optional[bool]
permission_status: Optional[bool]
class UserRoleCollectionKeys:
KEY_USER_ROLE_ID = "user_role_id"
KEY_PROJECT_ID = "project_id"
class UserRole(MongoCollectionBaseClass):
def __init__(self, mongo_client):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration,
collection=CollectionNames.user_role)
@property
def key_user_role_id(self):
return UserRoleCollectionKeys.KEY_USER_ROLE_ID
@property
def key_project_id(self):
return UserRoleCollectionKeys.KEY_PROJECT_ID
def find_roles(self):
access_groups = self.find({})
if access_groups:
return access_groups
return list()
def find_role_by_project(self, project_id):
query = {self.key_project_id: project_id}
access_groups = self.find_one(query)
if access_groups:
return access_groups
return list()
def find_role_by_param(self, **query):
access_groups = self.find_one(query)
if access_groups:
return access_groups
return dict()
def find_roles_by_list(self, user_role_id_list, project_id):
query = {"$or": [{self.key_user_role_id: {"$in": user_role_id_list}}]}
        if project_id is not None:
query["$or"].append({self.key_project_id: project_id})
access_groups = self.find(query)
if access_groups:
return access_groups
return list()
def find_user_role_by_id(self, user_role_id, filter_dict=None):
user = self.find_one(query={self.key_user_role_id: user_role_id}, filter_dict=filter_dict)
return user
def update_user_role(self, _id, data):
query = {self.key_user_role_id: _id}
self.update_one(query=query, data=data)
def delete_user_role(self, _id):
if _id:
query = {self.key_user_role_id: _id}
self.delete_one(query=query)
else:
return False
def get_data_by_aggregate(self, query_json: list):
response = list(self.aggregate(query_json))
return response
from typing import Optional, List
from pydantic import BaseModel
class MongoBaseSchema(BaseModel):
pass
class BaseRequestSchema(BaseModel):
"""
This is base schema for input requests to the Collection Class
"""
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy_utils import database_exists, create_database
from scripts.config.app_configurations import DBConf
maintenance_engine = create_engine(DBConf.MAINTENANCE_DB_URI)
if not database_exists(maintenance_engine.url):
create_database(maintenance_engine.url)
assistant_engine = create_engine(DBConf.ASSISTANT_DB_URI)
if not database_exists(assistant_engine.url):
create_database(assistant_engine.url)
maintenance_session = sessionmaker(autocommit=False, autoflush=False, bind=maintenance_engine)
assistant_session = sessionmaker(autocommit=False, autoflush=False, bind=assistant_engine)
Base = declarative_base()
# Dependency
def get_db():
db = maintenance_session()
try:
yield db
finally:
db.close()
def get_assistant_db():
db = assistant_session()
try:
yield db
finally:
db.close()
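
# --- Hedged usage sketch (illustrative only) ---
# get_db / get_assistant_db are generator dependencies: they yield an open Session and
# close it once the caller is done. The sketch assumes the service wires them up through
# FastAPI's Depends, which this module does not itself import.
if __name__ == "__main__":
    from fastapi import Depends, FastAPI
    from sqlalchemy.orm import Session

    app = FastAPI()

    @app.get("/health/db")
    def db_health(db: Session = Depends(get_db)):
        # The session is closed in the dependency's finally block after the response.
        return {"db_session_active": db.is_active}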
from typing import Optional, Union
from pydantic.validators import str_validator
def empty_to_none(v: str) -> Optional[str]:
if v == '':
return None
return v
class EmptyStrToNone(str):
@classmethod
def __get_validators__(cls):
yield str_validator
yield empty_to_none
none_or_float = Union[None, float, EmptyStrToNone]
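
# --- Hedged sketch (illustrative only) ---
# Demonstrates how none_or_float behaves on a pydantic model: empty strings fall through
# to EmptyStrToNone and become None, while numeric input resolves to float via the Union.
# The _Demo model exists only for this sketch.
if __name__ == "__main__":
    from pydantic import BaseModel

    class _Demo(BaseModel):
        value: none_or_float

    print(_Demo(value="").value)     # None
    print(_Demo(value="3.5").value)  # 3.5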
from pydantic import BaseModel
from sqlalchemy import Column, Integer, Float
from scripts.db.psql.databases import Base
from . import none_or_float
class DBModelGenStdNorm(Base):
__tablename__ = "gen_std_norm"
id = Column(Integer, autoincrement=True, primary_key=True)
dfg_generation = Column(Float, nullable=True)
ibp_consumption = Column(Float, nullable=True)
ibp_actualconsumption = Column(Float, nullable=True)
bp_yield = Column(Float, nullable=True)
residue_consumption = Column(Float, nullable=True)
residue_actualconsumption = Column(Float, nullable=True)
residue_yield = Column(Float, nullable=True)
effluent_consumption = Column(Float, nullable=True)
effluent_actualconsumption = Column(Float, nullable=True)
effluent_yield = Column(Float, nullable=True)
class GenStdNormSchema(BaseModel):
dfg_generation: none_or_float
ibp_consumption: none_or_float
ibp_actualconsumption: none_or_float
bp_yield: none_or_float
residue_consumption: none_or_float
residue_actualconsumption: none_or_float
residue_yield: none_or_float
effluent_consumption: none_or_float
effluent_actualconsumption: none_or_float
effluent_yield: none_or_float
from pydantic import BaseModel
from sqlalchemy import Column, Integer, String, Float
from scripts.db.psql.databases import Base
from scripts.db.psql.models.ope_formula_calculation import none_or_float
class DBModelMTDOEESummary(Base):
__tablename__ = "mtd_oee_summary"
id = Column(Integer, autoincrement=True, primary_key=True)
date = Column(String)
step_id = Column(String)
capacity_mtd = Column(Float, nullable=True)
total_time_mtd = Column(Float, nullable=True)
available_time_mtd = Column(Float, nullable=True)
total_production_good_mtd = Column(Float, nullable=True)
total_production_bad_mtd = Column(Float, nullable=True)
availability_mtd = Column(Float, nullable=True)
performance_mtd = Column(Float, nullable=True)
quality_mtd = Column(Float, nullable=True)
ope_mtd = Column(Float, nullable=True)
oee_net_available_time_mtd = Column(Float, nullable=True)
oee_planned_time_mtd = Column(Float, nullable=True)
oee_availability_mtd = Column(Float, nullable=True)
oee_performance_mtd = Column(Float, nullable=True)
oee_quality_mtd = Column(Float, nullable=True)
oee_mtd = Column(Float, nullable=True)
oee_ytd_target = Column(Float, nullable=True)
class MTDOEESummarySchema(BaseModel):
capacity_mtd: none_or_float
total_time_mtd: none_or_float
available_time_mtd: none_or_float
total_production_good_mtd: none_or_float
total_production_bad_mtd: none_or_float
availability_mtd: none_or_float
performance_mtd: none_or_float
quality_mtd: none_or_float
ope_mtd: none_or_float
oee_net_available_time_mtd: none_or_float
oee_planned_time_mtd: none_or_float
oee_availability_mtd: none_or_float
oee_performance_mtd: none_or_float
oee_quality_mtd: none_or_float
oee_mtd: none_or_float
oee_ytd_target: none_or_float
from pydantic import BaseModel
from sqlalchemy import Column, Integer, String, Float
from scripts.db.psql.databases import Base
from scripts.db.psql.models.ope_formula_calculation import none_or_float
class DBModelDailyProduction(Base):
__tablename__ = "oee_daily_production"
id = Column(Integer, autoincrement=True, primary_key=True)
date = Column(String)
step_id = Column(String)
output = Column(Float, nullable=True)
rework = Column(Float, nullable=True)
defect = Column(Float, nullable=True)
shift_incharge = Column(String, nullable=True)
class DailyProductionSchema(BaseModel):
output: none_or_float
rework: none_or_float
defect: none_or_float
shift_incharge: none_or_float
from pydantic import BaseModel
from sqlalchemy import Column, Integer, String, Float
from scripts.db.psql.databases import Base
from scripts.db.psql.models.ope_formula_calculation import none_or_float
class DBModelOEESummary(Base):
__tablename__ = "oee_summary"
id = Column(Integer, autoincrement=True, primary_key=True)
date = Column(String)
step_id = Column(String)
capacity = Column(Float, nullable=True)
total_time = Column(Float, nullable=True)
available_time = Column(Float, nullable=True)
total_production_good = Column(Float, nullable=True)
total_production_bad = Column(Float, nullable=True)
availability = Column(Float, nullable=True)
performance = Column(Float, nullable=True)
quality = Column(Float, nullable=True)
ope = Column(Float, nullable=True)
oee_net_available_time = Column(Float, nullable=True)
oee_planned_time = Column(Float, nullable=True)
oee_availability = Column(Float, nullable=True)
oee_performance = Column(Float, nullable=True)
oee_quality = Column(Float, nullable=True)
oee = Column(Float, nullable=True)
class OEESummarySchema(BaseModel):
capacity: none_or_float
total_time: none_or_float
available_time: none_or_float
total_production_good: none_or_float
total_production_bad: none_or_float
availability: none_or_float
performance: none_or_float
quality: none_or_float
ope: none_or_float
oee_net_available_time: none_or_float
oee_planned_time: none_or_float
oee_availability: none_or_float
oee_performance: none_or_float
oee_quality: none_or_float
oee: none_or_float
from pydantic.main import BaseModel
from sqlalchemy import Column, Integer, String
from scripts.db.psql.databases import Base
from scripts.db.psql.models.ope_formula_calculation import none_or_float
class DBModelMaster(Base):
__tablename__ = "oee_master_table"
id = Column(Integer, autoincrement=True, primary_key=True)
date = Column(String)
step_id = Column(String)
batch_size = Column(Integer, nullable=True)
bct = Column(Integer, nullable=True)
c_div_t = Column(Integer, nullable=True)
class MasterTableSchema(BaseModel):
batch_size: none_or_float
bct: none_or_float
c_div_t: none_or_float
from typing import Optional
from pydantic import BaseModel
from sqlalchemy import Column, Integer, String, Float
from scripts.db.psql.databases import Base
class DBModelProductionLosses(Base):
__tablename__ = "oee_production_losses"
id = Column(Integer, autoincrement=True, primary_key=True)
date = Column(String)
step_id = Column(String)
batch_no = Column(String, nullable=True)
step_no = Column(String, nullable=True)
stage_process = Column(String, nullable=True)
shift = Column(String, nullable=True)
asset_model = Column(String, nullable=True)
asset = Column(String, nullable=True)
asset_model_name = Column(String, nullable=True)
asset_name = Column(String, nullable=True)
loss_category = Column(String, nullable=True)
loss_reason = Column(String, nullable=True)
loss_reason_comment = Column(String, nullable=True)
description = Column(String, nullable=True)
loss_in_minutes = Column(Float, nullable=True)
from_time = Column(String, nullable=True)
to_time = Column(String, nullable=True)
unadjusted_loss_in_time = Column(Float, nullable=True)
booked_loss_in_time = Column(Float, nullable=True)
available_loss_in_minutes = Column(Float, nullable=True)
planned_loss_in_minutes = Column(Float, nullable=True)
class LossesStepData(BaseModel):
data_grid: Optional[list]
unadjusted_loss_in_time: Optional[float]
booked_loss_in_time: Optional[float]
available_loss_in_minutes: Optional[float]
planned_loss_in_minutes: Optional[float]
from typing import Optional, Union
from pydantic import BaseModel
from pydantic.validators import str_validator
from sqlalchemy import Column, Integer, String, Float
from scripts.db.psql.databases import Base
class DBModelSummary(Base):
__tablename__ = "ope_summary"
id = Column(Integer, autoincrement=True, primary_key=True)
date = Column(String)
step_id = Column(String)
capacity_for_shift = Column(Float, nullable=True)
actual_production = Column(Float, nullable=True)
ope = Column(Float, nullable=True)
production_loss = Column(Float, nullable=True)
loss_in_time = Column(Float, nullable=True)
booked_loss_in_time = Column(Float, nullable=True)
unadjusted_loss_in_time = Column(Float, nullable=True)
plant_capacity_feed = Column(Float, nullable=True)
norm = Column(Float, nullable=True)
capacity_a_b = Column(Float, nullable=True)
def empty_to_none(v: str) -> Optional[str]:
if v == '':
return None
return v
class EmptyStrToNone(str):
@classmethod
def __get_validators__(cls):
yield str_validator
yield empty_to_none
none_or_float = Union[None, float, EmptyStrToNone]
class SummarySchema(BaseModel):
capacity_for_shift: none_or_float
actual_production: none_or_float
ope: none_or_float
production_loss: none_or_float
loss_in_time: none_or_float
booked_loss_in_time: none_or_float
unadjusted_loss_in_time: none_or_float
plant_capacity_feed: none_or_float
norm: none_or_float
capacity_a_b: none_or_float
class ShiftSchema(BaseModel):
capacity_a_b: none_or_float
norm: none_or_float
plant_capacity_feed: none_or_float
capacity_for_shift: none_or_float
actual_production: none_or_float
ope: none_or_float
production_loss: none_or_float
loss_in_time: none_or_float
booked_loss_in_time: none_or_float
unadjusted_loss_in_time: none_or_float
class DBModelShiftDetails(Base):
__tablename__ = "ope_shift_wise_details"
id = Column(Integer, autoincrement=True, primary_key=True)
date = Column(String)
step_id = Column(String)
capacity_a_b = Column(String, nullable=True)
norm = Column(String, nullable=True)
plant_capacity_feed = Column(String, nullable=True)
capacity_for_shift = Column(Float, nullable=True)
actual_production = Column(Float, nullable=True)
ope = Column(Float, nullable=True)
production_loss = Column(Float, nullable=True)
loss_in_time = Column(Float, nullable=True)
shift = Column(String, nullable=True)
booked_loss_in_time = Column(Float, nullable=True)
unadjusted_loss_in_time = Column(Float, nullable=True)
from sqlalchemy import Column, Integer, String, Float
from scripts.db.psql.databases import Base
class DBModel(Base):
__tablename__ = "ope_production_losses"
id = Column(Integer, autoincrement=True, primary_key=True)
date = Column(String)
step_id = Column(String)
line = Column(String, nullable=True)
equipment = Column(String, nullable=True)
loss_category = Column(String, nullable=True)
loss_reason = Column(String, nullable=True)
loss_reason_comment = Column(String, nullable=True)
loss_in_minutes = Column(Float, nullable=True)
shift = Column(String, nullable=True)
from_time = Column(String, nullable=True)
to_time = Column(String, nullable=True)
from pydantic import BaseModel
from sqlalchemy import Column, Integer, Float
from scripts.db.psql.databases import Base
from . import none_or_float
class DBModelRMConsumption(Base):
__tablename__ = "rm_consumption"
id = Column(Integer, autoincrement=True, primary_key=True)
dfg_generation = Column(Float, nullable=True)
prm_consumption = Column(Float, nullable=True)
prm_actualconsumption = Column(Float, nullable=True)
prm_yield = Column(Float, nullable=True)
fresh_brm_consumption = Column(Float, nullable=True)
fresh_brm_actualconsumption = Column(Float, nullable=True)
fresh_brm_yield = Column(Float, nullable=True)
sol_1_consumption = Column(Float, nullable=True)
sol_1_actualconsumption = Column(Float, nullable=True)
sol_1_yield = Column(Float, nullable=True)
sol_2_consumption = Column(Float, nullable=True)
sol_2_actualconsumption = Column(Float, nullable=True)
sol_2_yield = Column(Float, nullable=True)
catalyst_consumption = Column(Float, nullable=True)
catalyst_actualconsumption = Column(Float, nullable=True)
catalyst_yield = Column(Float, nullable=True)
class RMConsumptionSchema(BaseModel):
dfg_generation: none_or_float
prm_consumption: none_or_float
prm_actualconsumption: none_or_float
prm_yield: none_or_float
fresh_brm_consumption: none_or_float
fresh_brm_actualconsumption: none_or_float
fresh_brm_yield: none_or_float
sol_1_consumption: none_or_float
sol_1_actualconsumption: none_or_float
sol_1_yield: none_or_float
sol_2_consumption: none_or_float
sol_2_actualconsumption: none_or_float
sol_2_yield: none_or_float
catalyst_consumption: none_or_float
catalyst_actualconsumption: none_or_float
catalyst_yield: none_or_float
from pydantic import BaseModel
from sqlalchemy import Column, Integer, Float
from scripts.db.psql.databases import Base
from . import none_or_float
class DBModelUtilStdNorm(Base):
__tablename__ = "util_std_norm"
id = Column(Integer, autoincrement=True, primary_key=True)
dfg_generation = Column(Float, nullable=True)
power_consumption = Column(Float, nullable=True)
power_actualconsumption = Column(Float, nullable=True)
power_yield = Column(Float, nullable=True)
steam_consumption = Column(Float, nullable=True)
steam_actualconsumption = Column(Float, nullable=True)
steam_yield = Column(Float, nullable=True)
nitrogen_consumption = Column(Float, nullable=True)
nitrogen_actualconsumption = Column(Float, nullable=True)
nitrogen_yield = Column(Float, nullable=True)
ng_consumption = Column(Float, nullable=True)
ng_actualconsumption = Column(Float, nullable=True)
ng_yield = Column(Float, nullable=True)
water_consumption = Column(Float, nullable=True)
water_actualconsumption = Column(Float, nullable=True)
water_yield = Column(Float, nullable=True)
condensate_generation_consumption = Column(Float, nullable=True)
condensate_generation_actualconsumption = Column(Float, nullable=True)
condensate_generation_yield = Column(Float, nullable=True)
class UtilStdNormSchema(BaseModel):
dfg_generation: none_or_float
power_consumption: none_or_float
power_actualconsumption: none_or_float
power_yield: none_or_float
steam_consumption: none_or_float
steam_actualconsumption: none_or_float
steam_yield: none_or_float
nitrogen_consumption: none_or_float
nitrogen_actualconsumption: none_or_float
nitrogen_yield: none_or_float
ng_consumption: none_or_float
ng_actualconsumption: none_or_float
ng_yield: none_or_float
water_consumption: none_or_float
water_actualconsumption: none_or_float
water_yield: none_or_float
condensate_generation_consumption: none_or_float
condensate_generation_actualconsumption: none_or_float
condensate_generation_yield: none_or_float
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from scripts.config.app_configurations import DBConf
from scripts.db.psql.models.gen_std_norm import DBModelGenStdNorm, GenStdNormSchema
from scripts.logging.logging import logging_config, logger
class QueryLayer:
def __init__(self, db: Session):
self.session: Session = db
self.echo = logging_config["level"].upper() == "DEBUG"
self.table_obj = DBModelGenStdNorm
self.create_table(self.table_obj)
def create_table(self, table):
try:
engine = create_engine(DBConf.ASSISTANT_DB_URI, echo=self.echo)
if not inspect(engine).has_table(table.__tablename__):
orm_table = table
orm_table.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred during start-up: {e}", exc_info=True)
@property
def column_id(self):
return "id"
def add_to_table(self, records, step_id, date):
list_of_recs = list()
try:
for each in records:
summary_obj = GenStdNormSchema(**each).dict(exclude_none=True)
table_obj = self.table_obj(**summary_obj)
list_of_recs.append(table_obj)
return list_of_recs
except Exception as e:
logger.exception(f"Exception occurred while adding to postgres table {self.table_obj.__tablename__} {e}")
def insert_data(self, object_models_list, step_id, date):
try:
self.delete_data()
mappings = self.add_to_table(object_models_list, step_id, date)
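            # add_to_table returns None when schema validation fails; bulk_save_objects
            # then raises TypeError, which is re-raised to the caller below.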
self.session.bulk_save_objects(mappings)
self.session.commit()
return True
except TypeError:
raise
except Exception as e:
logger.exception(e)
raise
def delete_data(self):
try:
self.session.query(self.table_obj).delete()
self.session.commit()
return True
except Exception as e:
logger.error(f"Failed to delete existing data, {e}")
raise
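# Illustrative usage sketch (not part of the module): builds a session from
# DBConf.ASSISTANT_DB_URI and reloads the gen_std_norm table. The record keys are
# assumptions and must match the GenStdNormSchema fields.
if __name__ == "__main__":
    from sqlalchemy.orm import sessionmaker
    demo_session = sessionmaker(bind=create_engine(DBConf.ASSISTANT_DB_URI))()
    try:
        QueryLayer(demo_session).insert_data(
            [{"dfg_generation": "12.5"}], step_id="step_demo", date="2023-01-01"
        )
    finally:
        demo_session.close()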
from fastapi.encoders import jsonable_encoder
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from scripts.config.app_configurations import DBConf
from scripts.db.psql.models.mtd_oee_summary import DBModelMTDOEESummary, MTDOEESummarySchema
from scripts.logging.logging import logging_config, logger
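# Query layer for the MTD OEE summary table: records are validated against
# MTDOEESummarySchema; an existing row for the step_id/date is updated in place,
# otherwise a new row is staged and bulk-inserted by insert_data.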
class QueryLayer:
def __init__(self, db: Session):
self.session: Session = db
self.echo = logging_config["level"].upper() == "DEBUG"
self.table_mtd_summary = DBModelMTDOEESummary
self.create_table(self.table_mtd_summary)
def create_table(self, table):
try:
engine = create_engine(DBConf.ASSISTANT_DB_URI, echo=self.echo)
if not inspect(engine).has_table(table.__tablename__):
orm_table = table
orm_table.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred during start-up: {e}", exc_info=True)
def add_to_table(self, records, step_id, date):
list_of_recs = list()
try:
for each in records:
summary_obj = MTDOEESummarySchema(**each).dict(exclude_none=True)
existing_data = self.session.query(self.table_mtd_summary) \
.filter(DBModelMTDOEESummary.step_id == step_id,
DBModelMTDOEESummary.date == date).first()
existing_data = jsonable_encoder(existing_data)
if existing_data:
self.session.query(self.table_mtd_summary).filter(DBModelMTDOEESummary.step_id == step_id,
DBModelMTDOEESummary.date == date).update(
summary_obj)
self.session.commit()
self.session.flush()
else:
table_obj = DBModelMTDOEESummary(**summary_obj, step_id=step_id,
date=date)
list_of_recs.append(table_obj)
return list_of_recs
except Exception as e:
logger.exception(f"Exception occurred while adding to postgres table {e}")
def insert_data(self, object_models_list, step_id, date):
try:
mappings = self.add_to_table(object_models_list, step_id, date)
self.session.bulk_save_objects(mappings)
self.session.commit()
self.session.flush()
return True
except TypeError:
raise
except Exception as e:
logger.exception(e)
raise
from fastapi.encoders import jsonable_encoder
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from scripts.config.app_configurations import DBConf
from scripts.db.psql.models.oee_daily_production import DBModelDailyProduction, DailyProductionSchema
from scripts.logging.logging import logging_config, logger
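# Query layer for the OEE daily-production table: insert_data first removes the rows
# for the given step_id/date and then bulk-inserts the validated records.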
class QueryLayer:
def __init__(self, db: Session):
self.session: Session = db
self.echo = logging_config["level"].upper() == "DEBUG"
self.table_daily_production = DBModelDailyProduction
self.create_table(self.table_daily_production)
def create_table(self, table):
try:
engine = create_engine(DBConf.ASSISTANT_DB_URI, echo=self.echo)
if not inspect(engine).has_table(table.__tablename__):
orm_table = table
orm_table.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred during start-up: {e}", exc_info=True)
def add_to_table(self, records, step_id, date):
list_of_recs = list()
try:
for each in records:
daily_prod_obj = DailyProductionSchema(**each).dict(exclude_none=True)
existing_data = self.session.query(self.table_daily_production) \
.filter(DBModelDailyProduction.step_id == step_id,
DBModelDailyProduction.date == date).first()
existing_data = jsonable_encoder(existing_data)
if existing_data:
self.session.query(self.table_daily_production).filter(DBModelDailyProduction.step_id == step_id,
DBModelDailyProduction.date == date).update(
daily_prod_obj)
self.session.commit()
self.session.flush()
else:
table_obj = DBModelDailyProduction(**daily_prod_obj, step_id=step_id,
date=date)
list_of_recs.append(table_obj)
return list_of_recs
except Exception as e:
logger.exception(f"Exception occurred while adding to postgres table {e}")
def insert_data(self, object_models_list, step_id, date):
try:
self.delete_data(step_id, date)
mappings = self.add_to_table(object_models_list, step_id, date)
self.session.bulk_save_objects(mappings)
self.session.commit()
return True
except TypeError:
raise
except Exception as e:
logger.exception(e)
raise
def delete_data(self, step_id, date):
try:
self.session.query(self.table_daily_production) \
.filter(self.table_daily_production.step_id == step_id, self.table_daily_production.date == date) \
.delete()
self.session.commit()
return True
except Exception as e:
logger.error(f"Failed to delete existing data, {e}")
raise
from fastapi.encoders import jsonable_encoder
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from scripts.config.app_configurations import DBConf
from scripts.db.psql.models.oee_formula_calculation import DBModelOEESummary, OEESummarySchema
from scripts.logging.logging import logging_config, logger
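# Query layer for the OEE formula-calculation summary: rows matching the step_id/date
# are updated in place, otherwise new rows are staged and bulk-inserted.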
class QueryLayer:
def __init__(self, db: Session):
self.session: Session = db
self.echo = logging_config["level"].upper() == "DEBUG"
self.table_summary = DBModelOEESummary
self.create_table(self.table_summary)
def create_table(self, table):
try:
engine = create_engine(DBConf.ASSISTANT_DB_URI, echo=self.echo)
if not inspect(engine).has_table(table.__tablename__):
orm_table = table
orm_table.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred during start-up: {e}", exc_info=True)
@property
def column_id(self):
return "id"
@property
def column_date(self):
return "date"
@property
def column_step_id(self):
return "step_id"
def add_to_table(self, records, step_id, date):
list_of_recs = list()
try:
for each in records:
summary_obj = OEESummarySchema(**each).dict(exclude_none=True)
existing_data = self.session.query(self.table_summary) \
.filter(DBModelOEESummary.step_id == step_id,
DBModelOEESummary.date == date).first()
existing_data = jsonable_encoder(existing_data)
if existing_data:
self.session.query(self.table_summary).filter(DBModelOEESummary.step_id == step_id,
DBModelOEESummary.date == date).update(
summary_obj)
self.session.commit()
self.session.flush()
else:
table_obj = DBModelOEESummary(**summary_obj, step_id=step_id,
date=date)
list_of_recs.append(table_obj)
return list_of_recs
except Exception as e:
logger.exception(f"Exception occurred while adding to postgres table {e}")
def insert_data(self, object_models_list, step_id, date):
try:
mappings = self.add_to_table(object_models_list, step_id, date)
self.session.bulk_save_objects(mappings)
self.session.commit()
return True
except TypeError:
raise
except Exception as e:
logger.exception(e)
raise
def delete_data(self, step_id, date):
try:
self.session.query(self.table_summary) \
.filter(self.table_summary.step_id == step_id, self.table_summary.date == date) \
.delete()
self.session.commit()
return True
except Exception as e:
logger.error(f"Failed to delete existing data, {e}")
raise
from fastapi.encoders import jsonable_encoder
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from scripts.config.app_configurations import DBConf
from scripts.db.psql.models.oee_master_table import DBModelMaster, MasterTableSchema
from scripts.logging.logging import logging_config, logger
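# Query layer for the OEE master table: upserts one record per step_id/date
# (update in place when a row exists, bulk-insert otherwise).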
class QueryLayer:
def __init__(self, db: Session):
self.session: Session = db
self.echo = logging_config["level"].upper() == "DEBUG"
self.table_master = DBModelMaster
self.create_table(self.table_master)
def create_table(self, table):
try:
engine = create_engine(DBConf.ASSISTANT_DB_URI, echo=self.echo)
if not inspect(engine).has_table(table.__tablename__):
orm_table = table
orm_table.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred during start-up: {e}", exc_info=True)
@property
def column_id(self):
return "id"
@property
def column_date(self):
return "date"
@property
def column_step_id(self):
return "step_id"
def add_to_table(self, records, step_id, date):
list_of_recs = list()
try:
for each in records:
summary_obj = MasterTableSchema(**each).dict(exclude_none=True)
existing_data = self.session.query(self.table_master) \
.filter(DBModelMaster.step_id == step_id,
DBModelMaster.date == date).first()
existing_data = jsonable_encoder(existing_data)
if existing_data:
self.session.query(self.table_master).filter(DBModelMaster.step_id == step_id,
DBModelMaster.date == date).update(
summary_obj)
self.session.commit()
self.session.flush()
else:
table_obj = DBModelMaster(**summary_obj, step_id=step_id,
date=date)
list_of_recs.append(table_obj)
return list_of_recs
except Exception as e:
logger.exception(f"Exception occurred while adding to postgres table {e}")
def insert_data(self, object_models_list, step_id, date):
try:
mappings = self.add_to_table(object_models_list, step_id, date)
self.session.bulk_save_objects(mappings)
self.session.commit()
return True
except TypeError:
raise
except Exception as e:
logger.exception(e)
raise
from copy import copy
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from scripts.config.app_configurations import DBConf
from scripts.db.psql.models.oee_production_losses import DBModelProductionLosses, LossesStepData
from scripts.logging.logging import logger, logging_config
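# Query layer for OEE production losses: flattens LossesStepData.data_grid into
# individual rows (dropping the nested assets and copying the aggregate loss figures
# onto each row) and replaces the stored rows for the given step_id/date.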
class QueryLayer:
def __init__(self, db: Session):
self.session: Session = db
self.table = DBModelProductionLosses
self.echo = logging_config["level"].upper() == "DEBUG"
self.create_table(self.table.__tablename__)
def create_table(self, table_name):
try:
engine = create_engine(DBConf.ASSISTANT_DB_URI, echo=self.echo)
if not inspect(engine).has_table(table_name):
orm_table = self.table
orm_table.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred during start-up: {e}", exc_info=True)
@property
def column_asset(self):
return "asset"
@property
def column_asset_model(self):
return "asset_model"
@property
def column_unadjusted_loss_in_time(self):
return "unadjusted_loss_in_time"
@property
def column_booked_loss_in_time(self):
return "booked_loss_in_time"
def add_to_table(self, input_data, step_id, date):
list_of_recs = list()
try:
data = LossesStepData(**input_data[0]).dict(exclude_none=True)
for each in data.get('data_grid'):
each_record = copy(each)
each_record.pop('assets', None)
table_obj = self.table(**each_record, step_id=step_id, date=date)
table_obj.unadjusted_loss_in_time = data.get('unadjusted_loss_in_time')
table_obj.booked_loss_in_time = data.get('booked_loss_in_time')
table_obj.planned_loss_in_minutes = data.get('planned_loss_in_minutes')
table_obj.available_loss_in_minutes = data.get('available_loss_in_minutes')
list_of_recs.append(table_obj)
return list_of_recs
except TypeError:
raise
except Exception as e:
logger.exception(f"Exception occurred while adding to postgres table {e}")
def insert_data(self, object_models_list, step_id, date):
try:
self.delete_data(step_id, date)
mappings = self.add_to_table(object_models_list, step_id, date)
self.session.bulk_save_objects(mappings)
self.session.commit()
return True
except TypeError:
raise
except Exception as e:
logger.exception(e)
raise
def delete_data(self, step_id, date):
try:
self.session.query(self.table) \
.filter(self.table.step_id == step_id, self.table.date == date) \
.delete()
self.session.commit()
return True
except Exception as e:
logger.error(f"Failed to delete existing data, {e}")
raise
from fastapi.encoders import jsonable_encoder
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from scripts.config.app_configurations import DBConf
from scripts.db.psql.models.ope_formula_calculation import *
from scripts.logging.logging import logger, logging_config
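# Query layer for the OPE formula-calculation output: each record is split into a
# daily summary (upserted into ope_summary per step_id/date) and per-shift rows
# (replaced in ope_shift_wise_details); update_losses patches the unadjusted and
# booked loss figures onto the summary row.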
class QueryLayer:
def __init__(self, db: Session):
self.session: Session = db
self.table_summary = DBModelSummary
self.shift_wise_summary = DBModelShiftDetails
self.echo = logging_config["level"].upper() == "DEBUG"
self.create_table(self.table_summary)
self.create_table(self.shift_wise_summary)
def create_table(self, table):
try:
engine = create_engine(DBConf.ASSISTANT_DB_URI, echo=self.echo)
if not inspect(engine).has_table(table.__tablename__):
orm_table = table
orm_table.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred during start-up: {e}", exc_info=True)
@property
def column_id(self):
return "id"
@property
def column_date(self):
return "date"
@property
def column_step_id(self):
return "step_id"
@property
def column_unadjusted_loss_in_time(self):
return "unadjusted_loss_in_time"
@property
def column_booked_loss_in_time(self):
return "booked_loss_in_time"
def add_to_table(self, records, step_id, date):
list_of_recs = list()
try:
for each in records:
for shift in ["summary", "shift_a", "shift_b", "shift_c"]:
shift_obj = {x.split(f"_{shift}")[0]: y for x, y in each.items() if shift in x}
if not shift_obj:
continue
if shift == "summary":
summary_obj = SummarySchema(**shift_obj).dict(exclude_none=True)
existing_data = self.session.query(self.table_summary) \
.filter(DBModelSummary.step_id == step_id,
DBModelSummary.date == date).first()
existing_data = jsonable_encoder(existing_data)
if existing_data:
self.session.query(self.table_summary).filter(DBModelSummary.step_id == step_id,
DBModelSummary.date == date).update(
summary_obj)
self.session.commit()
self.session.flush()
else:
table_obj = DBModelSummary(**summary_obj, step_id=step_id,
date=date)
list_of_recs.append(table_obj)
else:
shift_in_db = shift.split("shift_")[1].upper()
                        shift_model = ShiftSchema(**shift_obj).dict(exclude_none=True)
table_obj = DBModelShiftDetails(**shift_model, step_id=step_id, date=date, shift=shift_in_db)
list_of_recs.append(table_obj)
return list_of_recs
except TypeError:
raise
except Exception as e:
logger.exception(f"Exception occurred while adding to postgres table {e}")
def insert_data(self, object_models_list, step_id, date):
try:
self.delete_shift_data(step_id, date)
mappings = self.add_to_table(object_models_list, step_id, date)
self.session.bulk_save_objects(mappings)
self.session.commit()
return True
except TypeError:
raise
except Exception as e:
logger.exception(e)
raise
def delete_shift_data(self, step_id, date):
try:
self.session.query(self.shift_wise_summary) \
.filter(self.shift_wise_summary.step_id == step_id, self.shift_wise_summary.date == date) \
.delete()
self.session.commit()
return True
except Exception as e:
logger.error(e)
raise
def update_losses(self, unaccounted_losses, booked_loss, date, step_id):
try:
existing_data = self.session.query(self.table_summary) \
.filter(self.table_summary.date == date,
self.table_summary.step_id == step_id).first()
if not existing_data:
table_obj = DBModelSummary(step_id=step_id, date=date, unadjusted_loss_in_time=unaccounted_losses,
booked_loss_in_time=booked_loss)
self.session.bulk_save_objects([table_obj])
return False
existing_data_dict = jsonable_encoder(existing_data)
existing_data_dict.update({self.column_unadjusted_loss_in_time: unaccounted_losses,
self.column_booked_loss_in_time: booked_loss})
for field in jsonable_encoder(existing_data):
if field in existing_data_dict:
setattr(existing_data, field, existing_data_dict[field])
self.session.commit()
self.session.flush()
return True
except Exception as e:
logger.error(e)
raise
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from scripts.config.app_configurations import DBConf
from scripts.db.psql.models.production_losses import DBModel
from scripts.db.psql.query_layer.ope_formula_calculation import QueryLayer as OPEQueryLayer
from scripts.logging.logging import logger, logging_config
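# Query layer for the OPE production-loss grid: the input tuple carries the loss rows
# plus the unaccounted and booked loss totals, which are pushed to the OPE summary via
# OPEQueryLayer.update_losses before the grid rows are replaced for the step_id/date.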
class QueryLayer:
def __init__(self, db: Session):
self.session: Session = db
self.table = DBModel
self.echo = logging_config["level"].upper() == "DEBUG"
self.create_table(self.table.__tablename__)
def create_table(self, table_name):
try:
engine = create_engine(DBConf.ASSISTANT_DB_URI, echo=self.echo)
if not inspect(engine).has_table(table_name):
orm_table = self.table
orm_table.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred during start-up: {e}", exc_info=True)
@property
def column_id(self):
return "id"
@property
def column_date(self):
return "date"
@property
def column_step_id(self):
return "step_id"
@property
def column_line(self):
return "line"
@property
def column_equipment(self):
return "equipment"
@property
def column_loss_category(self):
return "loss_category"
@property
def column_loss_reason(self):
return "loss_reason"
@property
def column_loss_reason_comment(self):
return "loss_reason_comment"
@property
def column_loss_in_minutes(self):
return "loss_in_minutes"
def add_to_table(self, input_tuple, step_id, date):
list_of_recs = list()
try:
dg, unaccounted_loss, booked_loss, refer_step = input_tuple
OPEQueryLayer(self.session).update_losses(unaccounted_loss, booked_loss, date, refer_step)
for each in dg:
each["shift"] = each["shifts"]
each.pop("shifts")
table_obj = self.table(**each, step_id=step_id, date=date)
list_of_recs.append(table_obj)
return list_of_recs
except TypeError:
raise
except Exception as e:
logger.exception(f"Exception occurred while adding to postgres table {e}")
def insert_data(self, object_models_list, step_id, date):
try:
self.delete_data(step_id, date)
mappings = self.add_to_table(object_models_list, step_id, date)
self.session.bulk_save_objects(mappings)
self.session.commit()
return True
except TypeError:
raise
except Exception as e:
logger.exception(e)
raise
def delete_data(self, step_id, date):
try:
self.session.query(self.table) \
.filter(self.table.step_id == step_id, self.table.date == date) \
.delete()
self.session.commit()
return True
except Exception as e:
logger.error(f"Failed to delete existing data, {e}")
raise
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from scripts.config.app_configurations import DBConf
from scripts.db.psql.models.rm_consumption import DBModelRMConsumption, RMConsumptionSchema
from scripts.logging.logging import logging_config, logger
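# Query layer for the rm_consumption table: delete_data truncates the whole table
# before the validated records are bulk-inserted (step_id and date are accepted but
# not stored, since the model has no such columns).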
class QueryLayer:
def __init__(self, db: Session):
self.session: Session = db
self.echo = logging_config["level"].upper() == "DEBUG"
self.table_obj = DBModelRMConsumption
self.create_table(self.table_obj)
def create_table(self, table):
try:
engine = create_engine(DBConf.ASSISTANT_DB_URI, echo=self.echo)
if not inspect(engine).has_table(table.__tablename__):
orm_table = table
orm_table.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred during start-up: {e}", exc_info=True)
@property
def column_id(self):
return "id"
def add_to_table(self, records, step_id, date):
list_of_recs = list()
try:
for each in records:
summary_obj = RMConsumptionSchema(**each).dict(exclude_none=True)
table_obj = self.table_obj(**summary_obj)
list_of_recs.append(table_obj)
return list_of_recs
except Exception as e:
logger.exception(f"Exception occurred while adding to postgres table {self.table_obj.__tablename__} {e}")
def insert_data(self, object_models_list, step_id, date):
try:
self.delete_data()
mappings = self.add_to_table(object_models_list, step_id, date)
self.session.bulk_save_objects(mappings)
self.session.commit()
return True
except TypeError:
raise
except Exception as e:
logger.exception(e)
raise
def delete_data(self):
try:
self.session.query(self.table_obj).delete()
self.session.commit()
return True
except Exception as e:
logger.error(f"Failed to delete existing data, {e}")
raise
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from scripts.config.app_configurations import DBConf
from scripts.db.psql.models.util_std_norm import UtilStdNormSchema, DBModelUtilStdNorm
from scripts.logging.logging import logging_config, logger
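# Query layer for the util_std_norm table: same truncate-and-reload pattern as the
# rm_consumption layer above.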
class QueryLayer:
def __init__(self, db: Session):
self.session: Session = db
self.echo = logging_config["level"].upper() == "DEBUG"
self.table_obj = DBModelUtilStdNorm
self.create_table(self.table_obj)
def create_table(self, table):
try:
engine = create_engine(DBConf.ASSISTANT_DB_URI, echo=self.echo)
if not inspect(engine).has_table(table.__tablename__):
orm_table = table
orm_table.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred during start-up: {e}", exc_info=True)
@property
def column_id(self):
return "id"
def add_to_table(self, records, step_id, date):
list_of_recs = list()
try:
for each in records:
summary_obj = UtilStdNormSchema(**each).dict(exclude_none=True)
table_obj = self.table_obj(**summary_obj)
list_of_recs.append(table_obj)
return list_of_recs
except Exception as e:
logger.exception(f"Exception occurred while adding to postgres table {self.table_obj.__tablename__} {e}")
def insert_data(self, object_models_list, step_id, date):
try:
self.delete_data()
mappings = self.add_to_table(object_models_list, step_id, date)
self.session.bulk_save_objects(mappings)
self.session.commit()
return True
except TypeError:
raise
except Exception as e:
logger.exception(e)
raise
def delete_data(self):
try:
self.session.query(self.table_obj).delete()
self.session.commit()
return True
except Exception as e:
logger.error(f"Failed to delete existing data, {e}")
raise
import redis
from scripts.config.app_configurations import RedisConfig, KafkaConf
login_db = redis.from_url(RedisConfig.uri, db=int(RedisConfig.login_db), decode_responses=True)
project_details_db = redis.from_url(RedisConfig.uri, db=int(RedisConfig.project_tags_db), decode_responses=True)
partition_db = redis.from_url(RedisConfig.uri, db=int(KafkaConf.redis_db), decode_responses=True)
class InternalError(Exception):
pass
class UnauthorizedError(Exception):
pass
class ProjectIdError(Exception):
pass
class ILensPermissionError(Exception):
pass
class DuplicateTemplateNameError(Exception):
pass
class DuplicateWorkflowNameError(Exception):
pass
class ImplementationError(Exception):
pass
class RestrictBlanks(Exception):
pass
class StepsNotConfigured(Exception):
pass
class DuplicateLogbookNameError(Exception):
pass
class LeftNavigationNotPresent(Exception):
pass
class RequiredFieldMissing(Exception):
pass
class BulkUploadError(Exception):
pass
class ColumnsMisMatch(Exception):
pass
class InvalidValueFound(Exception):
pass
class QuantityGreaterThanException(Exception):
pass
class ILensErrors(Exception):
    """
    Base Error Class
    """
    def __init__(self, msg):
        Exception.__init__(self, msg)
class ErrorCodes:
ERR001 = "ERR001 - Operating Time is greater than Planned Time"
ERR002 = "ERR002 - Zero Values are not allowed"
ERR003 = "ERR003 - Operating Time is less than Productive Time"
ERR004 = "ERR004 - Rejected Units is greater than Total Units"
class DowntimeResponseError(ILensErrors):
"""
Error Occurred during fetch of downtime
"""
class AuthenticationError(ILensErrors):
"""
JWT Authentication Error
"""
class ErrorMessages:
ERROR001 = "Authentication Failed. Please verify token"
ERROR002 = "Signature Expired"
ERROR003 = "Signature Not Valid"
logger:
name: form-management
level: DEBUG
handlers:
- type: RotatingFileHandler
max_bytes: 100000000
back_up_count: 5
- type: SocketHandler
host: localhost
port: 23582
- type: StreamHandler
name: ebpr-engine
import logging.handlers
import os
import sys
import time
from logging import StreamHandler
from scripts.config import app_configurations
LOG_HANDLERS = ["console","file"]
log_level = "INFO"
#log_file = os.path.join(app_configurations.LOG_FILE_NAME + "_" + time.strftime("%Y%m%d") + '.log')
logger = logging.getLogger("structure")
logger.setLevel(log_level)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(filename)s - %(module)s: %(funcName)s: '
'%(lineno)d - %(message)s')
if 'console' in LOG_HANDLERS:
# Adding the log Console handler to the logger
console_handler = StreamHandler(sys.stdout)
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
\ No newline at end of file
from fastapi import Request
from scripts.config.app_configurations import PathToServices
from scripts.constants.api import FormEndPoints
from scripts.core.handlers.form_handler import FormHandler
from scripts.core.handlers.stage_handler import StageHandler
from scripts.core.schemas.forms import SaveForm
from scripts.core.schemas.mobile import GetMultiFormData
from scripts.core.schemas.stages import StagesList
from scripts.db import TaskInstance, mongo_client
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
class TaskHandlerMobile:
def __init__(self, project_id=None):
self.comm_utils = CommonUtils(project_id=project_id)
self.mobile_task_details_api = f"{PathToServices.WORKFLOW_MT}{FormEndPoints.api_wrk_task_details}"
self.tasks_instance_conn = TaskInstance(mongo_client=mongo_client, project_id=project_id)
self.stage_handler = StageHandler(project_id=project_id)
self.form_handler = FormHandler(project_id=project_id)
async def multi_form_load(self, request_data: GetMultiFormData, request_obj: Request):
try:
logger.debug(f"RESTAPI POST PAYLOAD: {request_data.dict()}")
headers = {
'login-token': request_obj.headers.get('login-token', request_obj.cookies.get('login-token')),
'projectId': request_obj.cookies.get("projectId", request_obj.cookies.get("project_id",
request_obj.headers.get(
"projectId"))),
'userId': request_obj.cookies.get("user_id",
request_obj.cookies.get("userId", request_obj.headers.get("userId")))}
task_details = self.comm_utils.hit_external_service(api_url=self.mobile_task_details_api,
request_cookies=headers,
payload=request_data.dict())
task_data = self.tasks_instance_conn.find_by_task_id(request_data.task_id)
input_stage_list = dict(task_id=task_data.task_id, workflow_id=task_data.associated_workflow_id,
task_status=task_data.current_status,
workflow_version=task_data.associated_workflow_version,
project_id=task_data.project_id, mobile=True)
left_nav_data = self.stage_handler.get_stages_list(StagesList(**input_stage_list), nav_type='left',
user_id=request_data.user_id)
task_info_data = {}
for each in left_nav_data.get('stages'):
if each.get('stage_id'):
form_input = dict(project_id=task_data.project_id, task_id=task_data.task_id,
stage_id=each.get('stage_id'), triggers={})
each_form_data = await self.form_handler.form_renderer(SaveForm(**form_input), request_data.user_id,
request_obj)
each.update(form_data=each_form_data[1])
task_info_data |= task_details.get('data')
task_info_data.update(dict(task_steps_data=left_nav_data))
return task_info_data
except Exception as e:
logger.error(f"Exception while saving record {str(e)}")
raise
import traceback
from fastapi import APIRouter, Depends, Request
from scripts.constants.api import FormEndPoints
from scripts.core.schemas.mobile import GetMultiFormData
from scripts.core.schemas.response_models import DefaultMobileResponse, DefaultMobileFailureResponse
from scripts.logging.logging import logger
from scripts.mobility.handlers.task_handler import TaskHandlerMobile
from scripts.utils.security_utils.decorators import CookieAuthentication
from scripts.utils.security_utils.project_decorator import MetaInfoCookie, MetaInfoSchema
mobile_task_router = APIRouter(tags=["Form services"], prefix=FormEndPoints.api_mobile)
auth = CookieAuthentication()
get_cookies = MetaInfoCookie()
@mobile_task_router.post(FormEndPoints.api_mobile_form_multiple)
async def mobile_multi_form(request_data: GetMultiFormData, request_obj: Request,
meta: MetaInfoSchema = Depends(get_cookies)):
try:
task_handler_mobile = TaskHandlerMobile(project_id=request_data.project_id)
response = await task_handler_mobile.multi_form_load(request_data, request_obj)
return DefaultMobileResponse(status=True, message="success", data=response).dict()
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultMobileFailureResponse(error=e.args, message="Error encountered in rendering form")
import os
import socket
import time
from copy import deepcopy
from datetime import datetime, timedelta
import httpx
import pytz
from dateutil import parser
from fastapi import Request
from scripts.config.app_configurations import KafkaConf, PathToServices
from scripts.constants import CommonKeys, CommonConstants
from scripts.constants.api import EventsEndPoints
from scripts.constants.app_constants import CommonStatusCode
from scripts.constants.date_constants import ui_time_format_data
from scripts.core.engine.task_engine import TaskEngine
from scripts.core.schemas.forms import CustomActionsModel
from scripts.core.schemas.other_schemas import ExternRequest
from scripts.db import mongo_client, TaskInstanceData, Trigger, TaskInstance
from scripts.db.mongo.ilens_assistant.collections.logbook import LogbookInfo
from scripts.db.mongo.ilens_configuration.aggregations.config_aggregate import ConfigAggregate
from scripts.db.mongo.ilens_configuration.collections.customer_projects import CustomerProjects
from scripts.db.mongo.ilens_configuration.collections.lookup_table import LookupTable
from scripts.db.mongo.ilens_configuration.collections.shifts import Shifts
from scripts.db.mongo.ilens_configuration.collections.unique_id import UniqueIdSchema, UniqueId
from scripts.db.mongo.ilens_configuration.collections.user import User
from scripts.db.mongo.ilens_configuration.collections.user_project import UserProject
from scripts.logging.logging import logger
from scripts.utils.ilens_publish_data import KairosWriter
class CommonUtils(CommonKeys):
def __init__(self, project_id=None):
self.user_conn = User(mongo_client)
self.user_proj = UserProject(mongo_client)
self.unique_con = UniqueId(mongo_client, project_id=project_id)
self.events_api = f"{PathToServices.ILENS_EVENTS}{EventsEndPoints.api_create_event}"
self.task_engine = TaskEngine(project_id=project_id)
self.logbook_conn = LogbookInfo(mongo_client=mongo_client, project_id=project_id)
self.task_inst_data = TaskInstanceData(mongo_client, project_id=project_id)
self.trigger_conn = Trigger(mongo_client, project_id=project_id)
self.task_instance_conn = TaskInstance(mongo_client, project_id=project_id)
self.customer_projects_con = CustomerProjects(mongo_client=mongo_client)
self.config_aggregate = ConfigAggregate()
self.lookup_data_conn = LookupTable(mongo_client, project_id=project_id)
self.default_code = os.environ.get("DEFAULT_EVENT_CODE", "")
@staticmethod
def get_time_now():
return time.time() * 1000
@staticmethod
def get_ip_of_user():
hostname = socket.gethostname()
return socket.gethostbyname(hostname)
@staticmethod
def meta_composer(user_id, is_update: bool = False):
if is_update:
meta_dict = dict(updated_by=user_id,
updated_at=int(time.time()))
return meta_dict
meta_dict = dict(created_by=user_id,
created_at=int(time.time()))
return meta_dict
@staticmethod
def get_time_in_ms():
return int(time.time() * 1000)
def get_user_roles(self, user_id):
user_rec = self.user_conn.find_user(user_id)
user_rec = user_rec if bool(user_rec) else {}
return user_rec.get("userrole", [])
@staticmethod
def get_time_by_ts(timestamp, timezone, time_format=None):
if time_format:
return str(datetime.fromtimestamp(timestamp, pytz.timezone(timezone)).strftime(time_format))
return datetime.fromtimestamp(timestamp, pytz.timezone(timezone))
@staticmethod
def convert_str_to_ts(_date, _time, _format, tz):
localized_tz = pytz.timezone(tz)
datetime_with_tz = datetime.strptime(f"{_date} {_time}", _format)
return int(localized_tz.localize(datetime_with_tz).timestamp()) * 1000
def get_user_meta(self, user_id=None, check_flag=False):
data_for_meta = {}
if check_flag:
data_for_meta[self.KEY_CREATED_BY] = user_id
data_for_meta[self.KEY_CREATED_TIME] = int(time.time() * 1000)
data_for_meta[self.KEY_UPDATED_AT] = user_id
data_for_meta[self.KEY_LAST_UPDATED_TIME] = int(time.time() * 1000)
return data_for_meta
def get_user_name_from_id(self, user_id):
user = self.user_conn.find_user(user_id)
return user.get("name", "") if bool(user) else ""
@staticmethod
def time_zone_converter(epoch_ts, tz, to_format=None):
date = datetime.fromtimestamp(epoch_ts // 1000, tz=pytz.timezone(tz))
return str(date.strftime(to_format)) if to_format else date
@staticmethod
def add_days_to_epoch(days, ts, tz):
current_datetime = datetime.fromtimestamp(ts // 1000, pytz.timezone(tz))
new = current_datetime + timedelta(days=days)
return int(new.timestamp() * 1000)
@staticmethod
def get_next_date(_date, _format, num):
next_date = datetime.strptime(_date, ui_time_format_data[_format]) + timedelta(days=num)
return next_date.strftime(ui_time_format_data[_format])
@staticmethod
def convert_trigger_date_to_epoch(triggers, request_data=None):
if utc_date := triggers.get("date"):
epoch_trigger = parser.parse(utc_date).timestamp() * 1000
if request_data:
request_data.date = epoch_trigger
return epoch_trigger
for each in triggers.keys():
if "date" in each:
utc_date = triggers[each]
epoch_trigger = parser.parse(utc_date).timestamp() * 1000
if request_data:
request_data.date = epoch_trigger
return epoch_trigger
return False
def get_trigger_in_epoch(self, triggers, submitted_data, field_props):
if epoch_value := self.convert_trigger_date_to_epoch(triggers):
return epoch_value
if not all([submitted_data, "data" in submitted_data, submitted_data.get("data")]):
return False
trigger_prop_dict = {x: y for x, y in field_props.items() if
"triggerOnChange" in y.keys() and y["triggerOnChange"] == "true"}
for each in submitted_data["data"].keys():
if "date" in each and each in trigger_prop_dict:
utc_date = submitted_data["data"][each]
return parser.parse(utc_date).timestamp() * 1000
return False
@staticmethod
def get_hierarchy_name(input_data: str, site_data: dict):
final_response = str()
try:
hierarchy_type = input_data.split("_")[0]
hierarchy_id = f"{hierarchy_type}_id"
hierarchy_name = f"{hierarchy_type}_name"
if hierarchy_type == "site":
return_data = site_data.get("site_name", str())
return return_data
return_data = deepcopy(site_data.get(hierarchy_type))
for each_data in return_data:
if each_data[hierarchy_id] == input_data:
final_response = each_data.get(hierarchy_name)
break
return final_response
except Exception as e:
logger.error(f"Error while fetching hierarchy details:{str(e)}")
return final_response
@staticmethod
def auditing_with_kafka(audits):
try:
kairos_writer = KairosWriter()
logger.debug(f"Data going to kafka writer in audit logs, len: {len(audits)}")
kairos_writer.audit_data(audits, KafkaConf.audit_topic)
logger.debug("Audited data successfully")
except Exception as e:
logger.error("Failed in auditing_with_kafka", e)
@staticmethod
def publish_data_to_kafka(tag_dict, project_id):
try:
kairos_writer = KairosWriter()
logger.debug(f"Data going to kafka writer, len: {len(tag_dict)}")
midnight = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0).timestamp() * 1000
# To avoid rule execution on backdated timestamps
backdated = {ts: data for ts, data in tag_dict.items() if ts < midnight and data}
non_backdated = {ts: data for ts, data in tag_dict.items() if ts >= midnight and data}
if backdated:
kairos_writer.write_data(backdated, KafkaConf.backdated_topic, project_id)
logger.debug("Published to backdated topic successfully")
if non_backdated:
kairos_writer.write_data(non_backdated, KafkaConf.topic, project_id)
logger.debug("Published to non-backdated topic successfully")
except Exception as e:
logger.error("Failed in publish_data_to_kafka", e)
@staticmethod
def get_updated_reference_data(records: list):
try:
if bool(records):
if len(records) == 1:
return records[0]
final_dict = records[0]
temp_dict = {}
for _data in records[1:]:
temp_dict |= _data["data"]
temp_dict.update(final_dict["data"])
final_dict["data"] = temp_dict
return final_dict
return {}
except Exception as e:
logger.exception(f"Exception occurred while fetching the reference data {e}")
return {}
def get_user_roles_by_project_id(self, user_id, project_id):
user_rec = self.user_conn.find_user_by_project_id(user_id=user_id, project_id=project_id)
user_rec = user_rec if bool(user_rec) else {}
if not user_rec:
user_rec = self.user_proj.fetch_user_project(user_id=user_id, project_id=project_id)
user_rec = user_rec if bool(user_rec) else {}
return user_rec.get("userrole", [])
def get_next_id(self, _param):
my_dict = UniqueIdSchema(key=_param)
my_doc = self.unique_con.find_one_record(key=_param)
if not my_doc.id:
my_dict.id = "100"
return self.unique_con.insert_record(my_dict)
else:
count_value = str(int(my_doc.id) + 1)
my_dict.id = count_value
return self.unique_con.update_record(my_dict)
@staticmethod
def get_iso_format(timestamp, timezone='UTC', timeformat=CommonConstants.__iso_format__):
return datetime.fromtimestamp(timestamp, pytz.timezone(timezone)).strftime(
timeformat) if timeformat else datetime.fromtimestamp(timestamp, pytz.timezone(timezone))
def get_shift(self, project_id, from_time: str, end_time: str):
try:
shifts_con = Shifts(mongo_client, project_id)
shift_data = shifts_con.find_shifts_by_project_id(project_id=project_id)
shift = ""
for each in shift_data:
if activities := each.get("activities", []):
shift_start_time = activities[0].get("shift_start")
shift_end_time = activities[0].get("shift_end")
if self.in_shift(from_time, end_time, shift_start_time, shift_end_time):
shift = each.get("shift_name", "")
return shift
return shift
except Exception as e:
logger.exception(e)
raise
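    # in_shift expects (from_time, end_time, shift_start, shift_end) as "HH:MM" strings
    # and reports whether the task window lies inside the shift window on today's date.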
@staticmethod
def in_shift(*args):
now = datetime.now()
args_modified = []
for each in args:
if each:
hour_minute = each.split(":")
args_modified.append(now.replace(hour=int(hour_minute[0]), minute=int(hour_minute[1]), second=0,
microsecond=0).timestamp())
if len(args_modified) == 4:
if args_modified[0] >= args_modified[2] and args_modified[1] <= args_modified[3]:
return True
return False
def trigger_create_event(self, request_data, task_data, user_id, request_obj: Request):
try:
site_templates = self.customer_projects_con.get_project_data_by_aggregate(
self.config_aggregate.get_project_template(request_data.get('project_id')))
site_templates = site_templates[0].get("data") if bool(site_templates) else []
hierarchy_id_str = ""
role_id = self.get_user_roles_by_project_id(user_id=user_id, project_id=request_data.get("project_id"))
user_role = role_id[0]
actions = self.get_actions(workflow_id=task_data.get('associated_workflow_id'),
workflow_version=task_data.get('associated_workflow_version'),
user_role=user_role, on_click=request_data.get('type'))
event_code = self.default_code
for state in actions:
if state["action_type"] == 'event':
event_code = state.get('event_codes', event_code)
logbook_data = self.logbook_conn.find_by_id(task_data.get("logbook_id"))
if hierarchy := self.task_engine.get_hierarchy(logbook_data.dict(), task_data):
hierarchy_id_str = self.task_engine.get_hierarchy_string(hierarchy, site_templates)
event_time = datetime.now().astimezone(pytz.timezone(request_data.get("tz"))).strftime("%Y-%m-%d %H:%M:%S")
event_dict = dict(asset_id=hierarchy_id_str, processes=','.join(logbook_data.logbook_tags),
user_action=request_data.get("type"),
user_trigger_data=dict(task_id=request_data.get("task_id"),
logbook_id=logbook_data.logbook_id,
submitted_data=request_data.get('submitted_data', {})),
query_key='task_id')
event_final_dict = dict(event_code=event_code, event_time=event_time, event_src_type="human_events",
project_id=request_data.get("project_id"), event_table="process_human_events",
event_data=event_dict)
event_payload = dict(data=event_final_dict, project_id=request_data.get("project_id"))
with httpx.Client() as client:
resp = client.post(url=self.events_api, cookies=request_obj.cookies, json=event_payload, timeout=15)
if resp.status_code in CommonStatusCode.SUCCESS_CODES:
json_res = resp.json()
logger.info(
f"Resp Message:{resp.status_code} \nCookies: {request_obj.cookies} \nRest API: {self.events_api}")
return json_res
elif resp.status_code == 404:
logger.info(f"Module not found: {self.events_api}")
elif resp.status_code == 401:
logger.info(f"Unauthorized to execute request on {self.events_api}")
logger.info(
f"Resp Message:{resp.status_code} \nCookies: {request_obj.cookies} \nRest API: {self.events_api}")
except Exception as e:
logger.exception(e)
raise
def get_actions(self, workflow_id, workflow_version, user_role, on_click):
trigger_data = self.trigger_conn.fetch_by_id(workflow_id=workflow_id,
workflow_version=workflow_version,
role=user_role,
on_click=on_click)
actions = trigger_data.actions
return actions
@staticmethod
def hit_external_service(api_url, payload=None, request_cookies=None,
timeout=int(os.environ.get("REQUEST_TIMEOUT", default=30)), method="post", params=None,
auth=None):
try:
logger.info(f"Inside function to hit external services\nURL - {api_url}")
payload_json = ExternRequest(url=api_url, timeout=timeout, cookies=request_cookies, params=params,
auth=auth)
payload_json = payload_json.dict(exclude_none=True)
if payload:
payload_json.update(json=payload)
with httpx.Client() as client:
for _ in range(3):
method_type = getattr(client, method)
resp = method_type(**payload_json)
logger.info(f"Resp Code:{resp.status_code}")
if resp.status_code in CommonStatusCode.SUCCESS_CODES:
return resp.json()
elif resp.status_code == 404:
logger.info(f"Module not found: {api_url}")
raise ModuleNotFoundError
elif resp.status_code == 401:
logger.info(f"Unauthorized to execute request on {api_url}")
logger.info(f"Resp Message:{resp.status_code} \nCookies: {request_cookies} \nRest API: {api_url}")
time.sleep(3)
except Exception as e:
logger.error(e)
raise
@staticmethod
def get_task_time(task_time, custom_model: CustomActionsModel, task_property_name, task_type="start"):
required_task_time = None
try:
task_time = task_time / 1000 if task_time else time.time()
if task_property_name in custom_model.submitted_data:
try:
required_task_time = parser.parse(timestr=custom_model.submitted_data.get(task_property_name))
if task_type.lower() == "start":
if required_task_time <= datetime.fromtimestamp(task_time,
tz=pytz.timezone(custom_model.tz)):
logger.info(f"OEE Start Time is less than Task Start Time")
required_task_time = task_time
except Exception as e:
logger.info(f"Exception occurred while converting datetime {e.args}")
required_task_time = None
except Exception as e:
logger.exception(f"Exception occurred while fetching the task creation time {e.args}")
if not required_task_time:
required_task_time = datetime.fromtimestamp(task_time, tz=pytz.timezone(custom_model.tz))
return required_task_time
from typing import List
import pandas as pd
from scripts.logging.logging import logger
from scripts.utils.common_utils import CommonUtils
class ProcessData:
def __init__(self, project_id=None):
self.common_utils = CommonUtils(project_id=project_id)
@staticmethod
def round_off(dfs, column_of_values):
try:
dfs[column_of_values] = dfs[column_of_values].astype('float64', errors='ignore')
dfs[column_of_values] = dfs[column_of_values].apply(
lambda row: round(row, 2) if isinstance(row, (int, float)) else row)
return dfs
except Exception as e:
logger.exception("Error occurred while rounding DF", {e})
return dfs
@staticmethod
def merge_with_another_df(df_1, df_2, merge_on: List):
try:
dfs = df_1.merge(df_2, on=merge_on, how='inner').fillna("-")
return dfs
except Exception as e:
logger.exception("Error occurred while merging DFs", {e})
raise
def convert_series_format(self, series, tz, in_format):
return series.apply(
lambda row: self.common_utils.time_zone_converter(row, tz, in_format))
def add_timestamp_to_df(self, df, date, tz, from_format):
if "time" not in df.columns:
return pd.Series(dtype=int)
df = df[df['time'].notna()]
date_series = df.apply(
lambda row: self.common_utils.convert_str_to_ts(date, row["time"], from_format, tz), axis=1)
return date_series
import json
from functools import lru_cache
@lru_cache()
def get_db_name(redis_client, project_id: str, database: str, delimiter="__"):
if not project_id:
return database
val = redis_client.get(project_id)
if val is None:
raise ValueError(
f"Unknown Project, Project ID: {project_id} Not Found!!!")
val = json.loads(val)
if not val:
return database
# Get the prefix flag to apply project_id prefix to any db
prefix_condition = bool(
val.get("source_meta", {}).get("add_prefix_to_database"))
if prefix_condition:
# Get the prefix name from mongo or default to project_id
prefix_name = val.get("source_meta", {}).get("prefix") or project_id
return f"{prefix_name}{delimiter}{database}"
return database
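# Illustrative sketch (hypothetical Redis entry): if the project-details cache maps
#   "project_1" -> {"source_meta": {"add_prefix_to_database": true, "prefix": "acme"}}
# then get_db_name(redis_client, "project_1", "ilens_assistant") returns
# "acme__ilens_assistant"; without the flag it returns "ilens_assistant" unchanged.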
import pandas as pd
from formiodata import form
from scripts.logging.logging import logger
def parse_component(component_json, given_key, required_key):
"""Usage: print(parse_component(component_json, 'textField', "label"))"""
builder = form.Builder(component_json)
key_content = getattr(builder, "input_components").get(given_key)
return getattr(key_content, required_key)
def get_field_props(form_props, search_keys, value):
"""Pass custom properties dict and get a dictionary matching those conditions"""
if not isinstance(search_keys, list):
search_keys = [search_keys]
props = pd.DataFrame.from_dict(form_props, orient='index')
if not all([not props.empty, set(search_keys).intersection(set(props.columns))]):
return dict()
try:
new_df = props[props[search_keys[0]] == value]
except KeyError:
new_df = props[props[search_keys[1]] == value]
return new_df.to_dict(orient='index')
def check_required(key, component_json):
builder = form.Builder(component_json)
key_content = getattr(builder, "input_components").get(key)
if not key_content:
logger.debug(f"{key} not present in component, skipping required fields check..")
return False
is_required = bool(getattr(key_content, "required"))
return is_required
def get_field_props_by_keys(form_props, search_keys):
"""Pass custom properties dict and get a dictionary matching those conditions"""
if not isinstance(search_keys, list):
search_keys = [search_keys]
props = pd.DataFrame.from_dict(form_props, orient='index')
if not all([not props.empty, set(search_keys).intersection(set(props.columns))]):
return dict()
if len(search_keys) > 1:
new_df = props.get(search_keys[0], props.get(search_keys[-1]))
else:
new_df = props.get(search_keys[0])
new_df.dropna(inplace=True)
return new_df.to_dict()
def get_form_component_info(component_json, required_key):
builder = form.Builder(component_json)
return getattr(builder, required_key)
import json
from contextlib import suppress
from ilens_kafka_publisher.v2 import KafkaPublisher
from kafka import KafkaProducer
from scripts.config.app_configurations import KafkaConf
from scripts.db.redis_connections import partition_db
from scripts.logging.logging import logger
class DataPush:
def __init__(self, topic=KafkaConf.topic):
try:
self.obj = KafkaPublisher(kafka_host=KafkaConf.host,
kafka_port=int(KafkaConf.port),
kafka_topic=topic,
redis_client=partition_db,
enable_sites_partition=KafkaConf.enable_sites_partition,
split_key=KafkaConf.split_key,
round_robin_enable=KafkaConf.round_robin_enable)
except Exception as e:
logger.error(f"Could not connect to Kafka: {e}")
def publish_message(self, msg):
try:
self.obj.perform_task(msg)
except Exception as e:
logger.debug(f"Failed to publish message - {e}")
logger.debug(f"Trying reconnect")
class KafkaProducerUtil:
def __init__(self):
try:
self.host = KafkaConf.host
self.port = int(KafkaConf.port)
kafka_broker = f"{self.host}:{str(self.port)}"
self.producer = KafkaProducer(bootstrap_servers=kafka_broker,
value_serializer=lambda v: v.encode('utf-8'),
api_version=(0, 10, 1))
self.producer.flush()
except Exception as e:
logger.error(f"Kafka connection error: {e}")
def publish(self, topic, data):
try:
_ = self.producer.send(topic, data)
logger.debug(f" Message sent to kafka-> LEN: {len(data)}")
return True
except Exception as e:
logger.error(e)
class KairosWriter:
def write_data(self, data_json, topic, project_id):
kafka_conn = DataPush(topic)
logger.debug(f"Data being pushed to kafka topic: {topic}")
msg_counter = 0
for k, v in data_json.items():
timestamp, data, site_ids = self.data_validator(k, v)
if not data:
continue
for each in site_ids:
values = {tag_id: value for tag_id, value in data.items() if tag_id.startswith(each)}
if not values:
continue
write_json = {
"data": values,
"site_id": each[:-1],
"gw_id": "",
"pd_id": "",
"p_id": project_id,
"timestamp": timestamp,
"msg_id": msg_counter,
"retain_flag": False
}
if topic == KafkaConf.backdated_topic:
write_json.update({"backup": True})
logger.debug(f"Timestamp: {timestamp}, Values: {data}")
kafka_conn.publish_message(msg=write_json)
msg_counter += 1
return msg_counter
@staticmethod
def audit_data(data_json, topic):
old_kafka_conn = KafkaProducerUtil()
logger.debug(f"Audit Data being pushed to kafka topic: {topic}")
msg_counter = len(data_json)
for each in data_json:
audit_json = dict(model="form_data_audits", record=each)
old_kafka_conn.publish(topic, json.dumps(audit_json))
return msg_counter
@staticmethod
def data_validator(timestamp, data):
__temp__ = {}
site_ids = set()
for k, v in data.items():
if not k.startswith("site"):
continue
            if isinstance(v, (int, float)):
__temp__[k] = v
site_id = f"{k.split('$')[0]}$"
site_ids.add(site_id)
continue
with suppress(ValueError):
__temp__[k] = float(v)
site_id = f"{k.split('$')[0]}$"
site_ids.add(site_id)
return int(timestamp), __temp__, site_ids
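    # Illustrative example (made-up tag ids): data_validator(1700000000000,
    # {"site_01$tag_a": "3.5", "quality_flag": 1}) returns
    # (1700000000000, {"site_01$tag_a": 3.5}, {"site_01$"}); keys that do not start
    # with "site" and values that cannot be cast to float are dropped.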
from ilens_kafka_publisher import KafkaPublisher
from scripts.config.app_configurations import KafkaConf
from scripts.logging.logging import logger
# For Audit logs
class DataPush:
def __init__(self):
try:
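            # NOTE: unlike the v2 DataPush above, redis_client is passed
            # KafkaConf.redis_db (a DB index from config) rather than a Redis
            # connection object such as partition_db.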
self.obj = KafkaPublisher(kafka_host=KafkaConf.host,
kafka_port=int(KafkaConf.port),
kafka_topic=KafkaConf.topic,
redis_client=KafkaConf.redis_db,
enable_sites_partition=KafkaConf.enable_sites_partition,
split_key=KafkaConf.split_key,
round_robin_enable=KafkaConf.round_robin_enable)
except Exception as e:
logger.error(f"Could not connect to Kafka: {e}")
def publish_message(self, msg):
try:
self.obj.perform_task(msg)
except Exception as e:
logger.debug(f"Failed to publish message - {e}")
logger.debug(f"Trying reconnect")
from typing import Dict, List, Optional
from pymongo import MongoClient
from pymongo.cursor import Cursor
from scripts.db.redis_connections import project_details_db
from scripts.logging.logging import logger
from scripts.utils.db_name_util import get_db_name
class MongoConnect:
def __init__(self, uri):
try:
self.uri = uri
self.client = MongoClient(self.uri, connect=False)
except Exception:
raise
def __call__(self, *args, **kwargs):
return self.client
def __repr__(self):
return f"Mongo Client(uri:{self.uri}, server_info={self.client.server_info()})"
class MongoCollectionBaseClass:
def __init__(self, mongo_client, database, collection):
self.client = mongo_client
self.database = database
self.collection = collection
        self.__database = None
        self.__project_id = None
def __repr__(self):
return f"{self.__class__.__name__}(database={self.database}, collection={self.collection})"
    @property
    def project_id(self):
        return self.__project_id
@project_id.setter
    def project_id(self, project_id):
        self.__project_id = project_id
if self.__database is None:
# storing original db name if None
self.__database = self.database
self.database = get_db_name(
redis_client=project_details_db,
project_id=project_id,
database=self.__database)
def insert_one(self, data: Dict):
"""
        Inserts a document into a collection in a Mongo database.
:param data: Data to be inserted
:return: Insert ID
"""
try:
database_name = self.database
collection_name = self.collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.insert_one(data)
logger.qtrace(data)
return response.inserted_id
except Exception:
raise
def insert_many(self, data: List):
"""
        Inserts multiple documents into a collection in a Mongo database.
:param data: List of Data to be inserted
:return: Insert IDs
"""
try:
database_name = self.database
collection_name = self.collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.insert_many(data)
logger.qtrace(data)
return response.inserted_ids
except Exception:
raise
def find(
self,
query: Dict,
filter_dict: Optional[Dict] = None,
sort=None,
skip: Optional[int] = 0,
limit: Optional[int] = None,
) -> Cursor:
"""
The function is used to query documents from a given collection in a Mongo Database
:param query: Query Dictionary
:param filter_dict: Filter Dictionary
:param sort: List of tuple with key and direction. [(key, -1), ...]
:param skip: Skip Number
:param limit: Limit Number
:return: List of Documents
"""
if sort is None:
sort = list()
if filter_dict is None:
filter_dict = {"_id": 0}
database_name = self.database
collection_name = self.collection
try:
db = self.client[database_name]
collection = db[collection_name]
if len(sort) > 0:
cursor = (
collection.find(
query,
filter_dict,
)
.sort(sort)
.skip(skip)
)
else:
cursor = collection.find(
query,
filter_dict,
).skip(skip)
if limit:
cursor = cursor.limit(limit)
logger.qtrace(f"{query}, {filter_dict}")
return cursor
except Exception:
raise
def find_one(self, query: Dict, filter_dict: Optional[Dict] = None):
try:
database_name = self.database
collection_name = self.collection
if filter_dict is None:
filter_dict = {"_id": 0}
db = self.client[database_name]
collection = db[collection_name]
response = collection.find_one(query, filter_dict)
logger.qtrace(f"{self.collection}, {query}, {filter_dict}")
return response
except Exception:
raise
def update_one(self, query: Dict, data: Dict, upsert: bool = False):
"""
:param upsert:
:param query:
:param data:
:return:
"""
try:
database_name = self.database
collection_name = self.collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.update_one(query, {"$set": data}, upsert=upsert)
logger.qtrace(f"{self.collection}, {query}, {data}")
return response.modified_count
except Exception:
raise
def update_to_set(self, query: Dict, param: str, data: Dict, upsert: bool = False):
"""
:param upsert:
:param query:
:param param:
:param data:
:return:
"""
try:
database_name = self.database
collection_name = self.collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.update_one(
query, {"$addToSet": {param: data}}, upsert=upsert
)
logger.qtrace(f"{self.collection}, {query}, {data}")
return response.modified_count
except Exception:
raise
def update_many(self, query: Dict, data: Dict, upsert: bool = False):
"""
:param upsert:
:param query:
:param data:
:return:
"""
try:
database_name = self.database
collection_name = self.collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.update_many(query, {"$set": data}, upsert=upsert)
logger.qtrace(f"{query}, {data}")
return response.modified_count
except Exception:
raise
def delete_many(self, query: Dict):
"""
:param query:
:return:
"""
try:
database_name = self.database
collection_name = self.collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.delete_many(query)
logger.qtrace(query)
return response.deleted_count
except Exception:
raise
def delete_one(self, query: Dict):
"""
:param query:
:return:
"""
try:
database_name = self.database
collection_name = self.collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.delete_one(query)
logger.qtrace(query)
return response.deleted_count
except Exception:
raise
def distinct(self, query_key: str, filter_json: Optional[Dict] = None):
"""
:param query_key:
:param filter_json:
:return:
"""
try:
database_name = self.database
collection_name = self.collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.distinct(query_key, filter_json)
logger.qtrace(f"{query_key}, {filter_json}")
return response
except Exception:
raise
def aggregate(
self,
pipelines: List,
):
try:
database_name = self.database
collection_name = self.collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.aggregate(pipelines)
logger.qtrace(f"{self.collection}, {pipelines}")
return response
except Exception:
raise
class MongoAggregateBaseClass:
def __init__(
self,
mongo_client,
database,
):
self.client = mongo_client
self.database = database
def aggregate(
self,
collection,
pipelines: List,
):
try:
database_name = self.database
collection_name = collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.aggregate(pipelines)
logger.qtrace(f"{collection}, {pipelines}")
return response
except Exception:
raise
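# Hypothetical usage sketch (not in the original source): wiring MongoConnect
# and MongoCollectionBaseClass together. The URI, database and collection
# names below are placeholders, not values from this repository.
if __name__ == "__main__":
    client = MongoConnect("mongodb://localhost:27017")()
    tickets = MongoCollectionBaseClass(client, database="example_db", collection="example_collection")
    inserted_id = tickets.insert_one({"ticket_title": "demo", "event_status": "open"})
    for doc in tickets.find(query={"event_status": "open"}, limit=5):
        print(doc)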
import json
import paho.mqtt.client as mqtt
from scripts.config.app_configurations import MQTTConf
from scripts.logging.logging import logger
def on_connect(client, userdata, flags, rc):
# paho-mqtt passes (client, userdata, flags, rc) to on_connect callbacks
logger.debug("Publisher Connected with result code " + str(rc))
def push_notification(notification, user_id):
try:
client = mqtt.Client()
client.on_connect = on_connect
client.connect(MQTTConf.host, MQTTConf.port, 30)
topic = f"{MQTTConf.publish_base_topic}/{user_id}/tasks"
if not client.is_connected():
client.reconnect()
client.publish(topic, json.dumps(notification), retain=False, qos=1)
logger.info(f"Notification message published to {topic}")
logger.debug(f"Notification: {notification}")
client.disconnect()
return True
except Exception as e:
logger.exception(f"Exception at MQTT Publish: {e}")
return False
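# Hypothetical usage sketch (not in the original source): publishing a task
# notification for a given user. The notification payload and user id are
# placeholders; the topic is built from MQTTConf.publish_base_topic as above.
if __name__ == "__main__":
    sample_notification = {"title": "Task assigned", "task_id": "demo-task-1"}
    push_notification(sample_notification, user_id="demo-user")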
import base64
from Crypto import Random
from Crypto.Cipher import AES
class AESCipher:
"""
A classical AES Cipher. Can use any size of data and any size of password thanks to padding.
Also ensure the coherence and the type of the data with a unicode to byte converter.
"""
def __init__(self, key):
self.bs = 16
self.key = AESCipher.str_to_bytes(key)
@staticmethod
def str_to_bytes(data):
# Accept both str and bytes; encode str to UTF-8 bytes
if isinstance(data, str):
return data.encode('utf8')
return data
def _pad(self, s):
return s + (self.bs - len(s) % self.bs) * AESCipher.str_to_bytes(chr(self.bs - len(s) % self.bs))
@staticmethod
def _unpad(s):
return s[:-ord(s[len(s) - 1:])]
def encrypt(self, raw):
raw = self._pad(AESCipher.str_to_bytes(raw))
iv = Random.new().read(AES.block_size)
cipher = AES.new(self.key, AES.MODE_CBC, iv)
return base64.b64encode(iv + cipher.encrypt(raw)).decode('utf-8')
def decrypt(self, enc):
enc = base64.b64decode(enc)
iv = enc[:AES.block_size]
cipher = AES.new(self.key, AES.MODE_CBC, iv)
data = self._unpad(cipher.decrypt(enc[AES.block_size:]))
return data.decode('utf-8')
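# Hypothetical usage sketch (not in the original source): a simple
# encrypt/decrypt round trip. The key below is a placeholder; a 16/24/32-byte
# key is assumed since the class does not derive one from a passphrase.
if __name__ == "__main__":
    cipher = AESCipher("0123456789abcdef")  # 16-byte demo key
    token = cipher.encrypt("hello world")
    assert cipher.decrypt(token) == "hello world"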
import uuid
from datetime import timedelta, datetime
from scripts.constants import Secrets
from scripts.db.redis_connections import login_db
from scripts.utils.security_utils.jwt_util import JWT
jwt = JWT()
def create_token(user_id, ip, token, age=Secrets.LOCK_OUT_TIME_MINS, login_token=None):
"""
This method is to create a cookie
"""
try:
uid = login_token
if not uid:
uid = str(uuid.uuid4()).replace("-", "")
payload = {
"ip": ip,
"user_id": user_id,
"token": token,
"uid": uid
}
exp = datetime.utcnow() + timedelta(minutes=age)
_extras = {"iss": Secrets.issuer, "exp": exp}
_payload = {**payload, **_extras}
new_token = jwt.encode(_payload)
# Add session to redis
login_db.set(uid, new_token)
login_db.expire(uid, timedelta(minutes=age))
return uid
except Exception:
raise
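# Hypothetical usage sketch (not in the original source): issuing a session
# token and attaching it as the login cookie on a FastAPI response. Redis
# (login_db) and the JWT key files must be reachable for this to run.
from fastapi import Response

def demo_login(response: Response, user_id: str, client_ip: str):
    uid = create_token(user_id=user_id, ip=client_ip, token=Secrets.token)
    response.set_cookie("login-token", uid, httponly=True, samesite="strict")
    return uid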
from secrets import compare_digest
from fastapi import Response, Request, HTTPException
from fastapi.openapi.models import APIKey, APIKeyIn
from fastapi.security import APIKeyCookie
from fastapi.security.api_key import APIKeyBase
from scripts.config.app_configurations import Service
from scripts.constants import Secrets
from scripts.db.redis_connections import login_db
from scripts.utils.security_utils.apply_encrytion_util import create_token
from scripts.utils.security_utils.jwt_util import JWT
class CookieAuthentication(APIKeyBase):
"""
Authentication backend using a cookie.
Internally, uses a JWT token to store the data.
"""
scheme: APIKeyCookie
cookie_name: str
cookie_secure: bool
def __init__(
self,
cookie_name: str = "login-token",
):
super().__init__()
self.model: APIKey = APIKey(**{"in": APIKeyIn.cookie}, name=cookie_name)
self.scheme_name = self.__class__.__name__
self.cookie_name = cookie_name
self.scheme = APIKeyCookie(name=self.cookie_name, auto_error=False)
self.login_redis = login_db
self.jwt = JWT()
async def __call__(self, request: Request, response: Response) -> str:
cookies = request.cookies
login_token = cookies.get("login-token")
if not login_token:
login_token = request.headers.get("login-token")
if not login_token:
raise HTTPException(status_code=401)
jwt_token = self.login_redis.get(login_token)
if not jwt_token:
raise HTTPException(status_code=401)
try:
decoded_token = self.jwt.validate(token=jwt_token)
if not decoded_token:
raise HTTPException(status_code=401)
except Exception as e:
raise HTTPException(status_code=401, detail=e.args)
user_id = decoded_token.get("user_id")
_token = decoded_token.get("token")
_token_age = int(decoded_token.get("age", Secrets.LOCK_OUT_TIME_MINS))
cookie_user_id = request.cookies.get(
"user_id", request.cookies.get(
"userId", request.headers.get("userId")
)
)
if not compare_digest(Secrets.token, _token):
raise HTTPException(status_code=401)
if login_token != decoded_token.get("uid"):
raise HTTPException(status_code=401)
if cookie_user_id and not compare_digest(user_id, cookie_user_id):
raise HTTPException(status_code=401)
try:
new_token = create_token(
user_id=user_id,
ip=request.client.host,
token=Secrets.token,
login_token=login_token,
age=_token_age
)
except Exception as e:
raise HTTPException(status_code=401, detail=e.args)
response.set_cookie(
self.cookie_name,
new_token,
samesite='strict',
httponly=True,
secure=Service.secure_cookie,
max_age=Secrets.LOCK_OUT_TIME_MINS * 60,
)
response.headers[self.cookie_name] = new_token
return user_id
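# Hypothetical usage sketch (not in the original source): protecting a route
# with CookieAuthentication. The router and path are placeholders; the
# dependency returns the authenticated user_id and refreshes the cookie.
from fastapi import APIRouter, Depends

demo_router = APIRouter()
auth = CookieAuthentication()

@demo_router.get("/whoami")
async def whoami(user_id: str = Depends(auth)):
    return {"user_id": user_id}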
import jwt
from jwt.exceptions import (
InvalidSignatureError,
ExpiredSignatureError,
MissingRequiredClaimError,
)
from scripts.config.app_configurations import KeyPath
from scripts.constants import Secrets
from scripts.logging.logging import logger
# AuthenticationError and ErrorMessages are referenced below but were not
# imported in this file; the module path here is an assumption and should be
# adjusted to wherever the project actually defines them.
from scripts.errors import AuthenticationError, ErrorMessages
class JWT:
def __init__(self):
self.max_login_age = Secrets.LOCK_OUT_TIME_MINS
self.issuer = Secrets.issuer
self.alg = Secrets.alg
self.public = KeyPath.public
self.private = KeyPath.private
def encode(self, payload):
try:
with open(self.private, "r") as f:
key = f.read()
return jwt.encode(payload, key, algorithm=self.alg)
except Exception as e:
logger.exception(f'Exception while encoding JWT: {str(e)}')
raise
def validate(self, token):
try:
with open(self.public, "r") as f:
key = f.read()
payload = jwt.decode(
token,
key,
algorithms=[self.alg],
leeway=Secrets.leeway_in_mins,
options={"require": ["exp", "iss"]},
)
return payload
except InvalidSignatureError:
raise AuthenticationError(ErrorMessages.ERROR003)
except ExpiredSignatureError:
raise AuthenticationError(ErrorMessages.ERROR002)
except MissingRequiredClaimError:
raise AuthenticationError(ErrorMessages.ERROR002)
except Exception as e:
logger.exception(f'Exception while validating JWT: {str(e)}')
raise
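# Hypothetical usage sketch (not in the original source): issuing and then
# validating a token with the JWT helper. Assumes the key pair referenced by
# KeyPath.public / KeyPath.private exists on disk.
if __name__ == "__main__":
    from datetime import datetime, timedelta

    helper = JWT()
    token = helper.encode({
        "user_id": "demo-user",
        "iss": Secrets.issuer,
        "exp": datetime.utcnow() + timedelta(minutes=helper.max_login_age),
    })
    print(helper.validate(token))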
from typing import Optional
from fastapi import Response, Request
from fastapi.openapi.models import APIKey, APIKeyIn
from fastapi.security.api_key import APIKeyBase, APIKeyCookie
from pydantic import BaseModel
class MetaInfoSchema(BaseModel):
project_id: Optional[str] = ""
user_id: Optional[str] = ""
language: Optional[str] = ""
class MetaInfoCookie(APIKeyBase):
"""
Project ID backend using a cookie.
"""
scheme: APIKeyCookie
cookie_name: str
def __init__(self, cookie_name: str = "projectId"):
super().__init__()
self.model: APIKey = APIKey(**{"in": APIKeyIn.cookie}, name=cookie_name)
self.cookie_name = cookie_name
self.scheme_name = self.__class__.__name__
self.scheme = APIKeyCookie(name=self.cookie_name, auto_error=False)
async def __call__(self, request: Request, response: Response):
cookies = request.cookies
cookie_json = {
"projectId": cookies.get("projectId", request.headers.get("projectId")),
"userId": cookies.get("user_id", cookies.get("userId", request.headers.get("userId"))),
"language": cookies.get("language", request.headers.get("language"))
}
return MetaInfoSchema(project_id=cookie_json["projectId"], user_id=cookie_json["userId"],
language=cookie_json["language"])
@staticmethod
def set_response_info(cookie_name, cookie_value, response: Response):
response.set_cookie(
cookie_name,
cookie_value,
samesite="strict",
httponly=True
)
response.headers[cookie_name] = cookie_value
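# Hypothetical usage sketch (not in the original source): reading project/user
# metadata from cookies or headers via the MetaInfoCookie dependency. The
# router and path are placeholders.
from fastapi import APIRouter, Depends

meta_router = APIRouter()
get_meta = MetaInfoCookie()

@meta_router.get("/context")
async def read_context(meta: MetaInfoSchema = Depends(get_meta)):
    return {"project_id": meta.project_id, "user_id": meta.user_id, "language": meta.language}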
from sqlalchemy import Float, Integer, Text, text, update, create_engine
from sqlalchemy import MetaData, Column, Table
from sqlalchemy.dialects.postgresql import JSON
from sqlalchemy.orm import Session
from sqlalchemy_utils import database_exists, create_database
from scripts.config.app_configurations import DBConf
from scripts.db.psql.databases import Base
from scripts.logging.logging import logger as LOG, logging_config
from scripts.utils.common_utils import CommonUtils
class TicketEntry(Base):
__tablename__ = "ticket_entry"
workflow_id = Column(Text)
template_id = Column(Text)
ticket_title = Column(Text)
site_hierarchy = Column(Text)
data = Column(JSON)
user_id = Column(Text)
created_on = Column(Float(precision=20, decimal_return_scale=True))
last_updated = Column(Float(precision=20, decimal_return_scale=True))
expiry_date = Column(Float(precision=20, decimal_return_scale=True))
assign_to = Column(Text)
id = Column(Integer, primary_key=True, autoincrement=True)
event_type = Column(Text)
event_status = Column(Text)
project_id = Column(Text)
@staticmethod
def column_template_id():
return 'template_id'
@staticmethod
def column_workflow_id():
return 'workflow_id'
@staticmethod
def column_ticket_title():
return 'ticket_title'
@staticmethod
def column_site_hierarchy():
return 'site_hierarchy'
@staticmethod
def column_event_status():
return 'event_status'
@staticmethod
def column_audit_type():
return 'audit_type'
@staticmethod
def column_event_type():
return 'event_type'
@staticmethod
def column_event_id():
return 'event_id'
@staticmethod
def column_created_on():
return "created_on"
@staticmethod
def column_last_updated():
return "last_updated"
@staticmethod
def column_expiry_date():
return "expiry_date"
@staticmethod
def column_assign_to():
return "assign_to"
@staticmethod
def column_user_id():
return "user_id"
@staticmethod
def column_data():
return "data"
@staticmethod
def column_id():
return "id"
@staticmethod
def column_project_id():
return "project_id"
def pagination_search(self, search_query, table):
row = self.session.query(table)
return row
def table_def_user_entry(self, meta=MetaData()):
return Table(self.__tablename__, meta,
Column(self.column_template_id(), Text),
Column(self.column_workflow_id(), Text),
Column(self.column_ticket_title(), Text),
Column(self.column_site_hierarchy(), Text),
Column(self.column_data(), JSON),
Column(self.column_user_id(), Text),
Column(self.column_created_on(), Float(precision=20, decimal_return_scale=True)),
Column(self.column_last_updated(), Float(precision=20, decimal_return_scale=True)),
Column(self.column_expiry_date(), Float(precision=20, decimal_return_scale=True)),
Column(self.column_assign_to(), Text),
Column(self.column_event_status(), Text),
Column(self.column_event_type(), Text),
Column(self.column_id(), Integer, primary_key=True, autoincrement=True),
Column(self.column_project_id(), Text))
class SQLDBUtils(CommonUtils):
def __init__(self, db: Session):
self.session: Session = db
self.filter = None
self.echo = logging_config["level"].upper() == "DEBUG"
super().__init__()
def add_data(self, table):
self.session.add(table)
self.session.commit()
self.session.flush()
@staticmethod
def enable_traceback():
return True
def create_db(self):
try:
engine = create_engine(DBConf.MAINTENANCE_DB_URI, echo=self.echo)
if not database_exists(engine.url):
create_database(engine.url)
except Exception as e:
LOG.error(f"Error occurred during start-up: {e}", exc_info=True)
@staticmethod
def create_all_tables(engine, meta):
if not engine.dialect.has_table(engine, TicketEntry().__tablename__):
TicketEntry().table_def_user_entry(meta=meta)
meta.create_all(engine)
try:
column = Column(TicketEntry().column_workflow_id(), Text(), primary_key=False)
# add_column is a staticmethod, so call it on the class rather than
# instantiating SQLDBUtils (whose __init__ requires a Session)
SQLDBUtils.add_column(engine, TicketEntry().__tablename__, column, meta)
except Exception:
pass
@staticmethod
def add_column(engine, table_name, column, meta):
t = Table(table_name, meta, autoload_with=engine)
columns = [m.key for m in t.columns]
column_name = column.compile(dialect=engine.dialect)
if column_name in columns:
return
column_type = column.type.compile(engine.dialect)
engine.execute('ALTER TABLE %s ADD COLUMN %s %s' % (table_name, column_name, column_type))
@staticmethod
def key_filter_expression():
return "expression"
@staticmethod
def key_filter_column():
return "column"
@staticmethod
def key_filter_value():
return "value"
def filter_expression(self):
filter_expression = self.filter.get(self.key_filter_expression(), 'eq')
LOG.debug(f"Filter expression: {filter_expression}")
return filter_expression
def filter_column(self):
column = self.filter.get(self.key_filter_column(), None)
LOG.debug(f"Filter column: {column}")
return column
def filter_value(self):
filter_value = self.filter.get(self.key_filter_value(), None)
LOG.debug(f"Filter value: {filter_value}")
return filter_value
def _filter(self, session_query, filters=None):
if filters is not None:
for _filter in filters:
self.filter = _filter
if self.filter_column() is None:
continue
session_query = self.get_session_query(session_query=session_query)
return session_query
def get_session_query(self, session_query):
try:
if self.filter_expression() == 'eq':
session_query = session_query.filter(self.filter_column() == self.filter_value())
if self.filter_expression() == 'le':
session_query = session_query.filter(self.filter_column() < self.filter_value())
if self.filter_expression() == 'ge':
session_query = session_query.filter(self.filter_column() > self.filter_value())
if self.filter_expression() == 'lte':
session_query = session_query.filter(self.filter_column() <= self.filter_value())
if self.filter_expression() == 'gte':
session_query = session_query.filter(self.filter_column() >= self.filter_value())
if self.filter_expression() == 'neq':
session_query = session_query.filter(self.filter_column() != self.filter_value())
except Exception as e:
LOG.error(f"Error occurred while filtering the session query {e}", exc_info=self.enable_traceback())
return session_query
def insert_one(self, session, table, insert_json):
try:
row = table()
for k in insert_json:
setattr(row, k, insert_json[k])
session.merge(row)
session.commit()
session.close()
return True
except Exception as e:
LOG.error(f"Error while inserting the record {e}", exc_info=self.enable_traceback())
raise
def update(self, table, update_json, filters=None, insert=False, insert_id=None):
try:
LOG.debug(filters)
session = self.session
row = session.query(table)
filtered_row = self._filter(session_query=row, filters=filters)
filtered_row = filtered_row.first()
if filtered_row is None:
LOG.debug("There are no rows meeting the given update criteria.")
if insert:
LOG.debug("Trying to insert a new record")
if insert_id is None:
LOG.warning("ID not provided to insert record. Skipping insert.")
return False
else:
update_json.update(insert_id)
if self.insert_one(session=session, table=table, insert_json=update_json):
return True
else:
return False
else:
return False
else:
LOG.debug("Record available to update")
for k in update_json:
setattr(filtered_row, k, update_json[k])
# filtered_row.update()
session.commit()
session.close()
return True
except Exception as e:
LOG.error(f"Error while updating the record {e}", exc_info=self.enable_traceback())
raise
def update_many(self, table, update_json, filters, conn):
try:
stmt = (update(table).where(filters).values(update_json))
conn.execute(stmt)
conn.close()
except Exception as e:
LOG.error(f"Error while updating the record {e}", exc_info=self.enable_traceback())
raise
def delete(self, table, filters=None):
try:
# LOG.trace(filters)
session = self.session
row = session.query(table)
filtered_row = self._filter(session_query=row, filters=filters)
if filtered_row is None:
LOG.debug("There were no records to be deleted")
session.close()
else:
filtered_row.delete()
session.commit()
session.close()
return True
except Exception as e:
LOG.error(f"Failed to delete a record {e}", exc_info=self.enable_traceback())
raise
def distinct_values_by_column(self, table, session, column, filters=None):
query = session.query(getattr(table, column).distinct().label(column))
query = self._filter(session_query=query, filters=filters)
distinct_values = [getattr(row, column) for row in query.all()]
session.close()
return distinct_values
def execute_query(self, table=None, query=None):
session = self.session
if query is None:
query = f"select * from {table}"
result = session.execute(query)
output = [dict(zip(row.keys(), row.values())) for row in result]
session.close()
return output
@staticmethod
def fetch_from_table(table, session, filter_text, limit_value, skip_value, project_id):
LOG.debug(filter_text)
# use sqlalchemy.text() for the raw filter clause (Text is a column type)
row = session.query(table).filter(text(filter_text)).limit(limit_value).offset(skip_value)
result = session.execute(row.statement)
output = [dict(zip(r.keys(), r.values())) for r in result]
session.close()
return output
class TicketEntryTable(SQLDBUtils):
def __init__(self, db: Session):
super().__init__(db)
self.table = TicketEntry
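# Hypothetical usage sketch (not in the original source): updating a ticket
# through SQLDBUtils. The session factory wiring is a placeholder; the filter
# dictionaries follow the column/expression/value keys consumed by _filter.
from sqlalchemy.orm import sessionmaker

def demo_update(engine):
    session = sessionmaker(bind=engine)()
    utils = TicketEntryTable(session)
    utils.update(
        table=TicketEntry,
        update_json={"event_status": "closed"},
        filters=[{"column": TicketEntry.id, "expression": "eq", "value": 1}],
    )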
from scripts.db import StepCollection, mongo_client, TaskInstanceData, TaskInstance
from scripts.utils.common_utils import CommonUtils
class StageParser:
def __init__(self, project_id=None):
self.stage_conn = StepCollection(mongo_client, project_id=project_id)
self.common_utils = CommonUtils(project_id=project_id)
self.step_conn = StepCollection(mongo_client, project_id=project_id)
self.task_inst_data = TaskInstanceData(mongo_client, project_id=project_id)
self.task_inst_conn = TaskInstance(mongo_client, project_id=project_id)
def get_stage_parser(self, stages):
stage_data, steps = self.task_inst_data.get_stage_map_steps(stages)
step_data = self.step_conn.get_step_map(steps)
left_nav = list()
top_nav = list()
for stage in stages:
step = stage_data[stage]
navigation = step_data[step]
if navigation == "left_navigation":
left_nav.append(stage)
else:
top_nav.append(stage)
return dict(left=left_nav, right=top_nav)
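# Hypothetical usage sketch (not in the original source): splitting stages
# into left/top navigation groups. The project and stage ids are placeholders
# and the underlying Mongo collections must exist for this to run.
def demo_stage_split():
    parser = StageParser(project_id="demo_project")
    nav = parser.get_stage_parser(["stage_1", "stage_2"])
    # nav -> {"left": [stages using left_navigation], "right": [all other stages]}
    return nav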
deployment:
environmentVar:
- name: MODULE_NAME
value: "form-management"
- name: MONGO_URI
valueFrom:
secretKeyRef:
name: mongo-creds
key: MONGO_URI
- name: MAINTENANCE_URI
value: "postgresql://ilens:iLens#4321@postgres-db-service.ilens-infra.svc.cluster.local:5432/maintenance_logbook_qa"
- name: KAIROS_URI
value: "http://ilens-timeseries1-kairosdb.ilens-infra.svc.cluster.local:80"
- name: REDIS_URI
value: "redis://redis-db-service.ilens-infra:6379"
- name: FORM_DE
value: "http://ebpr-periodic-data-engine.ilens-core:2699/"
- name: METADATA_DB
value: "ilens_configuration"
- name: BASE_PATH
value: "/code/data"
- name: MOUNT_DIR
value: "form-management"
- name: ILENS_ASSISTANT
value: "ilens_assistant"
- name: KAFKA_HOST
value: "kafka-0.kafka-headless.ilens-infra.svc.cluster.local"
- name: KAFKA_PORT
value: "9092"
- name: KAFKA_TOPIC
value: "ilens_dev"
- name: KAFKA_AUDIT_TOPIC
value: "audit_logs"
- name: ASSISTANT_URI
value: "postgresql://ilens:iLens#4321@postgres-db-service.ilens-infra.svc.cluster.local:5432/ilens_assistant"
- name: FORM_MT
value: "http://form-management.ilens-core.svc.cluster.local:5121/"
- name: PERIODIC_ENTRY_AUDITING
value: "true"
- name: FORM_NON_PERIODIC_AUDITING
value: "true"
- name: FORM_PERIODIC_AUDITING
value: "true"
- name: ENABLE_KAFKA_PARTITION
value: "true"
- name: ROUND_ROBIN_PARTITION
value: "true"
- name: INTERVAL
value: "60"
- name: MQTT_URL
value: "mqtt-service.ilens-infra.svc.cluster.local"
- name: MQTT_PORT
value: "1883"
- name: EMAIL_SERVICE_PROXY
value: "https://cloud.ilens.io/sms-util"
- name: SECURE_ACCESS
value: "true"
- name: METADATA_SERVICES
value: "http://metadata-service.ilens-core.svc.cluster.local:8989/"
- name: AUDIT_PROXY
value: "http://ilens-audit-tracker.ilens-core.svc.cluster.local:1223"
- name: CORS_URLS
value: "qa.ilens.io"
- name: SW_DOCS_URL
value: "/docs"
- name: SW_OPENAPI_URL
value: "/openapi.json"
- name: ENABLE_CORS
value: "True"
- name: SECURE_COOKIE
value: "True"
- name: KAFKA_HISTORY_OUTPUT_TOPIC
value: "ilens_prod_backup"
- name: TRIGGER_BANDWIDTH
value: "300"
- name: LOG_LEVEL
value: "INFO"
- name: ENABLE_EVENTS
value: "True"
- name: ILENS_EVENTS
value: "http://events-processor-services.ilens-core.svc.cluster.local:8122"
- name: DEFAULT_EVENT_CODE
value: "user_triggered_logbook_events"
- name: OEE_SERVICES
value: "http://oee-services.ilens-core:6869/"
- name: VISUALIZATION
value: "http://visualization-4.ilens-core.svc.cluster.local:1112/"
- name: HIERARCHY
value: "http://hierarchy-services.ilens-core:7008/"
- name: DIGITAL_TWIN_SERVICE
value: "http://digital-twin-service.ilens-core.svc.cluster.local:5555"
- name: DIGITAL_TWIN_ENGINE
value: "http://digital-twin-engine.ilens-core:5556/"
- name: DEVICE_CONTROL_PLANE_SERVICE
value: "http://device-control-plane-service.ilens-core:8558/"
- name: DEVICE_CONTROL_PLANE_WEB_SERVICE
value: "http://device-control-plane-web-service.ilens-core:8558/"
- name: RULES_ALERT_SERVICES
value: "http://rules-alerts-services.ilens-core:8586/"
- name: EBPR_ENGINE
value: "http://ebpr-engine.ilens-core:6968/"
- name: ELECTRONIC_LOGBOOK
value: "http://electronic-logbook-core.ilens-core:28788/e_logbook/"
- name: GLOBAL_CATALOG
value: "http://global-catalog.ilens-core:5001/"
- name: DATA_IMPORT
value: "http://data-import.ilens-core.svc.cluster.local:8191/"
- name: REPORT_ENGINE
value: "http://report-engine.ilens-core:6666/"
- name: MES_MODULE
value: "http://mes-module.ilens-core:14563/mes/"
- name: EBPR_PERIODIC_DATA_ENGINE
value: "http://ebpr-periodic-data-engine.ilens-core:2699/"
- name: DATA_PUBLISHER
value: "http://data-publisher.ilens-core:8767/"
- name: ILENS_SCHEDULER_SERVICE
value: "http://ilens-scheduler-service.ilens-core:28595/"
- name: EBPR_REPORT_ENGINE
value: "http://ebpr-report-engine.ilens-core:45678/"
- name: WORKFLOW_MANAGEMENT
value: "http://workflow-management.ilens-core:7120/"
- name: AUDIT_MANAGEMENT
value: "http://audit-management.ilens-core:5111/"
- name: ILENS_AUDIT_TRACKER
value: "http://ilens-audit-tracker.ilens-core:1223/"
- name: EVENT_EXPLORER
value: "http://event-explorer.ilens-core:9897/event_explorer/"
- name: USER_ACCESS_ROLE_MANAGEMENT
value: "http://user-access-role-management.ilens-core:5122/"
- name: SIMULATION_SERVICES
value: "http://simulation-services.ilens-core:40123/"
- name: MQTT_SERVICES
value: "http://mqtt-service.ilens-infra.svc.cluster.local:8083/mqtt"
- name: CUSTOM_FORM_SERVICES
value: "http://custom-form-services.ilens-core:2729/"
- name: HFE_REPORT_GENERATOR_SERVICE
value: "http://hfe-report-generator-service.ilens-core:7877/"
- name: MAINTENANCE_DASHBOARD_SERVICES
value: "http://maintenance-dashboard-services.ilens-core:8099/"
- name: HINDALCO_GENEALOGY_TRACKING
value: "http://hindalco-genealogy-tracking.ilens-core:8853/"
- name: DOWNTIME_OEE
value: "http://downtime-oee.ilens-core:1998/"
- name: FORM_MANAGEMENT
value: "http://form-management.ilens-core:5121/"