Commit a6872d56 authored by shikhin-1998's avatar shikhin-1998

initial commit

parent 3c95fe7d
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
\ No newline at end of file
# Pipeline stage order: version tagging first, then validation/scanning,
# then build/deploy, and finally the MySQL version-counter update.
stages:
- auto-tagging
- validate
- scan
- build
- deploy
- update
variables:
# NOTE(review): the MySQL password is passed on the command line (-p$MYSQL_PASS),
# which is visible to `ps` on the shell runner host — consider a defaults file.
MYSQL_CONNECTION: "mysql -h $MYSQL_HOST -u $MYSQL_USER -p$MYSQL_PASS "
# Helper script that polls a deployment's rollout status (used by qa-k8-deployment).
STATUS_SCRIPT: /home/gitlab-runner/monitor/deployment-status.sh
# Helm chart root for the QA environment's ilens modules.
HELM_CHART: /home/gitlab-runner/kubernetes/ilens/$QA_ENV/ilens-modules
# VARIABLES_YML: variables.yml
# DEPLOYMENT_YML: ebpr-report-engine.yml ebpr-report-engine-celery.yml
# Rollout wait budget handed to STATUS_SCRIPT.
TIMEOUT: 1020s
# Global before_script: ensure a version row exists for this module in the
# version table, then load the current prod/qa/dev counters into shell vars.
before_script:
- val=`echo $($MYSQL_CONNECTION -e "SELECT COUNT(*) FROM $VERSION_DB.$DB_TABLE WHERE category='Server' AND type='Service' AND os='docker' AND module_name='$CI_PROJECT_NAME' ") | cut -d " " -f2`
# Seed a fresh row (prod=2, qa/dev=0) the first time this module is built.
- if [ $val == 0 ]; then $MYSQL_CONNECTION -e "INSERT INTO $VERSION_DB.$DB_TABLE values('Server','Service','$CI_PROJECT_NAME','docker', '2', '0', '0', '0')";fi
- QA=$($MYSQL_CONNECTION -N -e "SELECT qa FROM $VERSION_DB.$DB_TABLE where module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'")
- DEV=$($MYSQL_CONNECTION -N -e "SELECT dev FROM $VERSION_DB.$DB_TABLE where module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'")
- PROD=$($MYSQL_CONNECTION -N -e "SELECT prod FROM $VERSION_DB.$DB_TABLE where module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'")
# On master: derive the next semantic version from the merge source branch
# (QA -> release bump, feature -> feature bump, patch -> patch bump), build
# and push a versioned image, create an annotated git tag, and persist the
# new version numbers to MySQL.
auto-tagging:
stage: auto-tagging
# Overrides the global before_script: loads ILENS/RELEASE/FEATURE/PATCH
# counters from the release-version table, seeding a row on first run.
before_script:
- val=`echo $($MYSQL_CONNECTION -e "SELECT COUNT(*) FROM $VERSION_DB.$VERSION_RELEASE_TABLE WHERE module_name='$CI_PROJECT_NAME' ") | cut -d " " -f2`
- if [ $val == 0 ]; then $MYSQL_CONNECTION -N -e "INSERT INTO $VERSION_DB.$VERSION_RELEASE_TABLE values('$CI_PROJECT_NAME', 'iLens', '5', '6', '0', '0')";fi
- ILENS=$($MYSQL_CONNECTION -N -e "SELECT ilens_version FROM "$VERSION_DB.$VERSION_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
- RELEASE=$($MYSQL_CONNECTION -N -e "SELECT release_version FROM "$VERSION_DB.$VERSION_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
- FEATURE=$($MYSQL_CONNECTION -N -e "SELECT feature_version FROM "$VERSION_DB.$VERSION_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
- PATCH=$($MYSQL_CONNECTION -N -e "SELECT patch_version FROM "$VERSION_DB.$VERSION_RELEASE_TABLE" where module_name = '$CI_PROJECT_NAME'")
script:
# Extract the merge source branch name from the merge-commit title
# (assumes the default "Merge branch 'X' into ..." title format).
- SOURCE_BRANCH=$(echo $CI_COMMIT_TITLE | cut -f 3 -d " " | cut -f 1 -d "/" | cut -f 2 -d "'")
- >
if [ "$SOURCE_BRANCH" = "QA" ]; then
((RELEASE=RELEASE+1)) && FEATURE=0 && PATCH=0;
TAG_NAME=v$ILENS.$RELEASE.$FEATURE
IMAGE_URL=azrilensprod.azurecr.io/ilens/release/versions/v"$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME"
PROD=$RELEASE; QA=0; DEV=0;
$MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD' ,qa='$QA', dev='$DEV' WHERE module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'"
elif [ $SOURCE_BRANCH == "feature" ]; then
((FEATURE=FEATURE+1)) && PATCH=0;
TAG_NAME=v$ILENS.$RELEASE.$FEATURE
IMAGE_URL=azrilensprod.azurecr.io/ilens/release/versions/v"$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME"
elif [ $SOURCE_BRANCH == "patch" ]; then
((PATCH=PATCH+1));
TAG_NAME=v$ILENS.$RELEASE.$FEATURE.$PATCH
IMAGE_URL=azrilensprod.azurecr.io/ilens/release/versions/v"$ILENS.$RELEASE:$CI_PROJECT_NAME-$TAG_NAME"
else
exit 1
fi
# Assemble ReleaseNote.txt: image URL appended, then version and module
# name prepended (so the final order is Module Name / Version / Image).
- echo -e "\n\nImage:" $IMAGE_URL >> ReleaseNote.txt
- sed -i "1s|^|Version":" $TAG_NAME\n|" ReleaseNote.txt
- sed -i "1s|^|Module Name":" $CI_PROJECT_NAME\n|" ReleaseNote.txt
# Build, push, then drop the local copy of the release image.
- docker build -t $IMAGE_URL .
- docker push $IMAGE_URL
- docker rmi --force $IMAGE_URL
# Push the annotated tag back to the repo using the bot credentials.
- URL=$(echo $CI_PROJECT_URL | sed 's|https://||')
- git remote set-url origin https://$GIT_USRNAME:$GIT_USRPASSWD@$URL
- git config user.email "devopsilens@gmail.com"
- git config user.name "$GIT_USRNAME"
- git tag -a $TAG_NAME -F ReleaseNote.txt
- git push origin $TAG_NAME
# Persist the bumped version components and log the release in history.
- $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$VERSION_RELEASE_TABLE SET release_version='$RELEASE', feature_version='$FEATURE', patch_version='$PATCH' WHERE module_name = '$CI_PROJECT_NAME' "
- $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$VERSION_RELEASE_TABLE values('$CI_JOB_ID', '$CI_PROJECT_NAME','iLens', '$ILENS.$RELEASE.$FEATURE', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
tags:
- shell
only:
- master
#~~~~~| Requirements.txt version check |~~~~~#
# Fails the pipeline when any non-comment entry in requirements.txt is not
# pinned to an exact numeric version.
package-version-check:
stage: validate
script:
- REQUIREMENTS=$(cat requirements.txt)
- FAILED=0
- >
for REQ in ${REQUIREMENTS[@]};
do
if [ "${REQ:0:1}" = "#" ]; then continue; fi
PKG=$(echo $REQ | tr = " " | awk '{print $1}')
VER=$(echo $REQ | tr = " " | awk '{print $2}')
VER=${VER//[^[:alnum:]]/}
if [ ! -z "${VER//[0-9]}" ] || [ -z $VER ]; then
echo " Package version not specified for: $PKG "
FAILED=`expr $FAILED + 1`
fi
done
# Any unpinned package fails the job.
- if [ $FAILED -gt 0 ]; then exit 1; fi
only:
- QA
tags:
- shell
#~~~~~| Vulnerability Scanner |~~~~~#
# Builds a throwaway image, scans it with Trivy (HTML report for humans,
# JSON for the severity gate), and emails the report when the severity
# check script fails.
vulnerability-scanner:
stage: scan
script:
# NOTE(review): QA/DEV are bumped here but never used in this job.
- QA=`expr $QA + 1` && DEV=0
- DOCKER_IMAGE=$CI_PROJECT_NAME:vulnarable-scan
- docker build -t $DOCKER_IMAGE .
- trivy image --format template --template "@/home/gitlab-runner/image-scanner/templates/html.tpl" -o imageScanner-$CI_PROJECT_NAME.html $DOCKER_IMAGE
- trivy image --format json -o imageScanner-$CI_PROJECT_NAME.json $DOCKER_IMAGE
- docker rmi --force $DOCKER_IMAGE
# Publish the HTML report where the email utility can serve it.
- mv imageScanner-$CI_PROJECT_NAME.html /data0/email-util/module/reports/
- >
if ! /home/gitlab-runner/image-scanner/severity_check imageScanner-$CI_PROJECT_NAME.json ; then
cd /home/gitlab-runner/image-scanner/
./mail imageScanner-$CI_PROJECT_NAME.html $DOCKER_IMAGE
fi
only:
- QA
tags:
- shell
#~~~~~| QA K8 |~~~~~#
# Builds and pushes the QA image, then helm-upgrades every YAML listed in
# $DEPLOYMENT_YML; if a rollout fails its status check the image tag is
# reverted and the previous release redeployed.
qa-k8-deployment:
stage: deploy
script:
- REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/qa
- export KUBECONFIG=/home/gitlab-runner/.kube/$QA_ENV
- NAMESPACE=ilens-core
# Next QA build number; dev counter resets on every QA deploy.
- QA=`expr $QA + 1` && DEV=0
- docker build -t $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV .
- docker push $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
- echo "Deploying $CI_PROJECT_NAME"
- >
for YML in ${DEPLOYMENT_YML[@]};
do
FILE_PATH=$HELM_CHART/$YML
SERVICE=$(echo $YML | cut -f 1 -d "." )
CURR_VERSION=$(cat $FILE_PATH | grep "imageName:" )
CURR_VERSION=$(echo $CURR_VERSION | cut -f 3 -d ":")
echo " Deploying $SERVICE"
echo " $SERVICE Version: $CURR_VERSION"
sed -E -i'' "s|(.*imageName:.*"$REGISTRY_URL"/).*|\1"$CI_PROJECT_NAME":v"$PROD.$QA.$DEV"|" $FILE_PATH
helm upgrade --install $SERVICE $HELM_CHART -f $FILE_PATH -f $VARIABLES_YML -n $NAMESPACE --history-max 1
if ! sh $STATUS_SCRIPT $SERVICE $NAMESPACE $TIMEOUT ; then
sed -E -i'' "s|(.*imageName:.*"$REGISTRY_URL"/).*|\1"$CI_PROJECT_NAME":"$CURR_VERSION"|" $FILE_PATH
helm upgrade --install $SERVICE $HELM_CHART -f $FILE_PATH -f $VARIABLES_YML -n $NAMESPACE --history-max 1
echo " $SERVICE Reverted to the previous version..."
exit 1
fi
UI_POD=$(kubectl get pods -n $NAMESPACE | grep ilens-ui | awk '{print $1}')
UI_POD_LIST=($UI_POD)
if [ ! -z "$UI_POD_LIST" ]; then
echo "Restarting Nginx"
for UI_POD_NAME in ${UI_POD_LIST[@]}; do
kubectl exec $UI_POD_NAME -n $NAMESPACE -- nginx -s reload
done
fi
done
only:
- QA
tags:
- shell
# After a successful QA deployment: drop the local image, bump the QA
# counter in MySQL, and append a row to the deployment history table.
tag-update-qa:
stage: update
script:
- DEV=0
- REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/qa
# Remove the previous version's image from the runner's local cache.
- docker rmi --force $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
# Recompute the version that qa-k8-deployment actually pushed.
- QA=`expr $QA + 1` && DEV=0
- $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$DB_TABLE values('$CI_JOB_ID','Server','Service', '$CI_PROJECT_NAME','docker', '$PROD.$QA.$DEV', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
- $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD' ,qa='$QA', dev='$DEV' WHERE module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'"
dependencies:
- qa-k8-deployment
only:
- QA
tags:
- shell
# #~~~~~| JUBILANT PRE PROD |~~~~~#
# jubilant-pre-prod-deployment:
# stage: deploy
# script:
# - REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/dev
# - export KUBECONFIG=/home/gitlab-runner/.kube/dev-cluster-config-bkp
# - YML_PATH_DEV=/home/gitlab-runner/kubernetes/ilens/pre-prod/jubilant
# - NAMESPACE=core-dev
# - DEV=`expr $DEV + 1`
# - docker build -t $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV .
# - docker push $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
# - cd $YML_PATH_DEV
# - >
# for YML in ${DEPLOYMENT_YML[@]};
# do
# SERVICE=$(echo $YML | cut -f 1 -d "." )
# CURR_VERSION=$(cat $YML | grep "image:" )
# CURR_VERSION=$(echo $CURR_VERSION | cut -f 3 -d ":")
# echo " Deploying $SERVICE"
# echo " $SERVICE Version: $CURR_VERSION"
# sed -E -i'' "s|(.*image:.*"$REGISTRY_URL"/).*|\1"$CI_PROJECT_NAME":v"$PROD.$QA.$DEV"|" $YML
# kubectl delete -f $YML
# sleep 30
# kubectl apply -f $YML
# if ! sh $STATUS_SCRIPT $SERVICE $NAMESPACE $TIMEOUT ; then
# sed -E -i'' "s|(.*image:.*"$REGISTRY_URL"/).*|\1"$CI_PROJECT_NAME":"$CURR_VERSION"|" $YML
# kubectl apply -f $YML
# echo " $SERVICE Reverted to the previous version..."
# exit 1
# fi
# done
# only:
# - jubilant-pre-prod
# tags:
# - shell
# tag-update-dev:
# stage: update
# script:
# - DEV=`expr $DEV + 1`
# - $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$DB_TABLE values('$CI_JOB_ID','Server','Service', '$CI_PROJECT_NAME','docker', '$PROD.$QA.$DEV', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
# - $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD' ,qa='$QA', dev='$DEV' WHERE module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'"
# dependencies:
# - jubilant-pre-prod-deployment
# only:
# - jubilant-pre-prod
# tags:
# - shell
#~~~~~| DEV 220 |~~~~~#
# Non-container deployment to the dev 220 host: tar the workspace, copy it
# over SSH, rsync into the service directory, install requirements into the
# service's conda env, and restart the systemd unit.
dev-deployment-220:
stage: deploy
script:
- tar czvf $CI_PROJECT_NAME.tar.gz *
- echo "Deploying to the dev 220 server..."
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "mkdir -p /tmp/$CI_PROJECT_NAME/tar/"
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "mkdir -p /tmp/$CI_PROJECT_NAME/untar/"
- sshpass -p $OFC_PASSWD scp $CI_PROJECT_NAME.tar.gz $OFC_USERNAME@$OFC_HOSTNAME:/tmp/$CI_PROJECT_NAME/tar/
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "tar xzvf /tmp/$CI_PROJECT_NAME/tar/$CI_PROJECT_NAME.tar.gz -C /tmp/$CI_PROJECT_NAME/untar/"
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "rsync -r /tmp/$CI_PROJECT_NAME/untar/* /opt/services/ilens2.0/$CI_PROJECT_NAME/"
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "/home/svc-ilens/anaconda3/envs/pipe-counting/bin/pip install -r /opt/services/ilens2.0/$CI_PROJECT_NAME/requirements.txt"
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "sudo systemctl restart ilens_2.0_dev_$CI_PROJECT_NAME.service"
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "sudo systemctl status ilens_2.0_dev_$CI_PROJECT_NAME.service"
# Cleanup runs even when the script above fails.
after_script:
- sshpass -p $OFC_PASSWD ssh $OFC_USERNAME@$OFC_HOSTNAME "rm -rf /tmp/$CI_PROJECT_NAME"
- rm -f $CI_PROJECT_NAME.tar.gz
only:
- develop
tags:
- shell
#~~~~~| PRE PROD |~~~~~#
# Incremental docker-compose deployment for the customer pre-prod hosts.
# The customer name is the branch prefix (e.g. dalmia-pre-prod -> dalmia);
# dalmia/jubilant live on the 235 host, hccb/welspun on the 236 host.
pre-prod-deployment:
stage: deploy
script:
- REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/dev
# Customer name from the branch prefix.
- SOURCE_BRANCH=$(echo $CI_COMMIT_BRANCH | cut -f 1 -d "-")
- >
if [ ! -z $SOURCE_BRANCH ]; then
echo "Deploying $CI_PROJECT_NAME module in the $CI_COMMIT_BRANCH environment"
else
echo "Create a proper branch name, current branch name $CI_COMMIT_BRANCH"
exit 1
fi
- DOCKER_COMPOSE=/opt/service/compose/$SOURCE_BRANCH/docker-compose.yml
- DEV=`expr $DEV + 1`
- docker build -t $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV .
- docker push $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
- IMAGE_URL=$REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
# Module key used in the compose file's image anchor (&<name>-image).
- PROJECT_NAME=$(echo $CI_PROJECT_NAME | cut -f 1 -d ".")
- >
if [ "$SOURCE_BRANCH" = "dalmia" ] || [ "$SOURCE_BRANCH" = "jubilant" ] ; then
OLD_IMAGE=$(sshpass -p $DEV_235_PASSWD ssh $DEV_235_USERNAME@$DEV_235_HOSTNAME "cat $DOCKER_COMPOSE | grep '&$PROJECT_NAME-image' | cut -f '3' -d ' ' ")
echo "Current image":" $OLD_IMAGE"
echo "New image":" $IMAGE_URL"
sshpass -p $DEV_235_PASSWD ssh $DEV_235_USERNAME@$DEV_235_HOSTNAME "sed -E -i'' 's|(.*image:.*)'$OLD_IMAGE'|\1'$IMAGE_URL'|1' '$DOCKER_COMPOSE'"
sshpass -p $DEV_235_PASSWD ssh $DEV_235_USERNAME@$DEV_235_HOSTNAME "sudo docker-compose -f $DOCKER_COMPOSE up -d"
elif [ "$SOURCE_BRANCH" = "hccb" ] || [ "$SOURCE_BRANCH" = "welspun" ] ; then
OLD_IMAGE=$(sshpass -p $DEV_236_PASSWD ssh $DEV_236_USERNAME@$DEV_236_HOSTNAME "cat $DOCKER_COMPOSE | grep '&$PROJECT_NAME-image' | cut -f '3' -d ' ' ")
echo "Current image":" $OLD_IMAGE"
echo "New image":" $IMAGE_URL"
sshpass -p $DEV_236_PASSWD ssh $DEV_236_USERNAME@$DEV_236_HOSTNAME "sed -E -i'' 's|(.*image:.*)'$OLD_IMAGE'|\1'$IMAGE_URL'|1' '$DOCKER_COMPOSE'"
sshpass -p $DEV_236_PASSWD ssh $DEV_236_USERNAME@$DEV_236_HOSTNAME "docker-compose -f $DOCKER_COMPOSE up -d"
else
echo "The pipeline supports the incremental deployment of only dalmia, jubilant, hccb, welspun pre-prods"
fi
only:
- dalmia-pre-prod
- jubilant-pre-prod
- hccb-pre-prod
- welspun-pre-prod
tags:
- shell
# After a pre-prod deployment: drop the pushed image from the runner cache,
# log the deployment in history, and persist the bumped dev counter.
tag-update-dev:
stage: update
script:
# FIX: PROJECT_NAME was never defined in this job — it was only set inside
# the pre-prod-deployment job's shell, which does not carry over between
# jobs — so the history INSERT and version UPDATE ran with an empty module
# name. Derive it here exactly as pre-prod-deployment does.
- PROJECT_NAME=$(echo $CI_PROJECT_NAME | cut -f 1 -d ".")
# Recompute the version that pre-prod-deployment actually pushed.
- DEV=`expr $DEV + 1`
- REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/dev
- docker rmi --force $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
- $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$DB_TABLE values('$CI_JOB_ID','Server','Service', '$PROJECT_NAME','docker', '$PROD.$QA.$DEV', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
- $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD' ,qa='$QA', dev='$DEV' WHERE module_name = '$PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'"
dependencies:
- pre-prod-deployment
only:
- dalmia-pre-prod
- jubilant-pre-prod
- hccb-pre-prod
- welspun-pre-prod
tags:
- shell
# #~~~~~| DEV Image Build |~~~~~#
# dev-image-build:
# stage: deploy
# script:
# - REGISTRY_URL=azacrknowledgelens.azurecr.io/knowledgelens/products/ilens/dev
# - DEV=`expr $DEV + 1`
# - docker build -t $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV .
# - docker push $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
# - docker rmi --force $REGISTRY_URL/$CI_PROJECT_NAME:v$PROD.$QA.$DEV
# only:
# - develop
# tags:
# - shell
# tag-update-dev:
# stage: update
# script:
# - DEV=`expr $DEV + 1`
# - $MYSQL_CONNECTION -e "INSERT INTO $HISTORY_DB.$DB_TABLE values('$CI_JOB_ID','Server','Service', '$CI_PROJECT_NAME','docker', '$PROD.$QA.$DEV', '$CI_COMMIT_SHA', '$GITLAB_USER_NAME', '$CI_COMMIT_REF_NAME')"
# - $MYSQL_CONNECTION -e "UPDATE $VERSION_DB.$DB_TABLE SET prod='$PROD' ,qa='$QA', dev='$DEV' WHERE module_name = '$CI_PROJECT_NAME' AND type = 'Service' AND category = 'Server' AND os = 'docker'"
# dependencies:
# - dev-image-build
# only:
# - develop
# tags:
# - shell
#~~~~~| CODE QUALITY |~~~~~#
# Static analysis: push the project to SonarQube, wait briefly for the
# analysis to land, then generate/email the CSV quality report.
# NOTE(review): Sonar admin credentials are passed on the command line.
codequality:
stage: deploy
image: $SONAR_SCANNER_IMAGE
script:
- /opt/sonar-scanner/bin/sonar-scanner -Dsonar.projectKey=$CI_PROJECT_NAME -Dsonar.projectName=$CI_PROJECT_NAME -Dsonar.typescript.node=./node/node -Dsonar.login=admin -Dsonar.password=$SONAR_PASSWD -Dsonar.sources=.
# Give the Sonar server time to process the submitted analysis.
- sleep 5
- python3 /opt/code_quality_report/static_code_quality_report_csv_v2.py $CI_PROJECT_NAME $GITLAB_USER_EMAIL,$EMAIL_TO $EMAIL_FROM $EMAIL_PASSWD False admin $SONAR_PASSWD
only:
- develop
tags:
- docker
import uvicorn
from fastapi import FastAPI

from scripts.core.services.Text_Extraction_Service import router
# FIX: replaced `from ... import *` with an explicit import of the two
# settings this entrypoint actually uses (PEP 8: wildcard imports pollute
# the module namespace and hide where names come from).
from scripts.config.app_configurations import HOST, PORT

# FastAPI application exposing the text-extraction routes.
app = FastAPI()
app.include_router(router)

if __name__ == "__main__":
    # Serve with uvicorn; host/port come from conf/application.conf.
    uvicorn.run("app:app", host=HOST, port=PORT)
[LOG]
log_level=INFO
base_path=logs/
file_name=structure
handlers=console,file
logger_name=structure
[SERVICE]
host=localhost
port=8200
[PATH]
model_path = scripts/utility/model/yolov8l_r1_r2_lp_best.pt
# YOLOv5 requirements
# Usage: pip install -r requirements.txt
# Base ----------------------------------------
matplotlib>=3.2.2
numpy>=1.18.5
opencv-python>=4.1.1
Pillow>=7.1.2
PyYAML>=5.3.1
requests>=2.23.0
scipy>=1.4.1
#torch>=1.7.0 # see https://pytorch.org/get-started/locally/ (recommended)
#torchvision>=0.8.1
tqdm>=4.64.0
# protobuf<=3.20.1 # https://github.com/ultralytics/yolov5/issues/8012
# Logging -------------------------------------
tensorboard>=2.4.1
# clearml>=1.2.0
# comet
# Plotting ------------------------------------
pandas>=1.1.4
seaborn>=0.11.0
# Export --------------------------------------
# coremltools>=6.0 # CoreML export
# onnx>=1.9.0 # ONNX export
# onnx-simplifier>=0.4.1 # ONNX simplifier
# nvidia-pyindex # TensorRT export
# nvidia-tensorrt # TensorRT export
# scikit-learn<=1.1.2 # CoreML quantization
# tensorflow>=2.4.1 # TF exports (-cpu, -aarch64, -macos)
# tensorflowjs>=3.9.0 # TF.js export
# openvino-dev # OpenVINO export
# Deploy --------------------------------------
# tritonclient[all]~=2.24.0
# Extras --------------------------------------
ipython # interactive notebook
psutil # system utilization
thop>=0.1.1 # FLOPs computation
# mss # screenshots
# albumentations>=1.0.3
# pycocotools>=2.0 # COCO mAP
# roboflow
python-dotenv==0.21.0
fastapi==0.86.0
python-multipart==0.0.5
click==8.1.3
h11==0.14.0
uvicorn==0.19.0
ultralytics
paddleocr==2.7.0.3
paddlepaddle==2.5.2
import configparser

# Service configuration, parsed once at import time from the INI file that
# ships with the deployment.  Everything below is exposed as a module-level
# constant for the rest of the code base.
config = configparser.ConfigParser()
config.read('conf/application.conf')

# [LOG] — logging settings.
_log = config['LOG']
LOG_LEVEL = _log['log_level']
LOG_BASEPATH = _log['base_path']
LOG_FILE_NAME = LOG_BASEPATH + _log['file_name']
LOG_HANDLERS = _log['handlers']
LOGGER_NAME = _log['logger_name']

# [SERVICE] — HTTP bind address for uvicorn.
_service = config['SERVICE']
HOST = _service['host']
PORT = _service.getint('port')

# [PATH] — YOLO weights location.
MODEL_PATH = config['PATH']['model_path']
\ No newline at end of file
class BaseName:
    """Root URL prefix shared by all text-extraction endpoints."""
    baseurl = "/Text_Extraction"


class TEXT_EXTRACT:
    """Route paths under the text-extraction prefix."""
    text = f"{BaseName.baseurl}/value"


class Image:
    """Route constants for image uploads."""
    upload = "/upload_image"
from paddleocr import PaddleOCR
from ultralytics import YOLO
import torch
import cv2
from scripts.config.app_configurations import *
# Resolve the inference device once at import time: use the GPU when CUDA
# is available, otherwise fall back to CPU inference.
if torch.cuda.is_available():
    device = 'cuda'
else:
    device = 'cpu'
def load_ocr_model():
    """Build and return a PaddleOCR reader for English text with the
    text-angle classifier enabled."""
    return PaddleOCR(use_angle_cls=True, lang='en')
def load_yolo_model():
    """Load the YOLO weights from MODEL_PATH and move the model onto the
    module-level inference device."""
    return YOLO(MODEL_PATH).to(device)
def _first_ocr_text(ocr, crop):
    """Run PaddleOCR on a cropped plate region and return the first
    recognized string, or None when nothing was detected.

    Assumes the PaddleOCR result layout [[(bbox, (text, conf)), ...], ...]
    and discards the confidence value, as the original code did.
    """
    result = ocr.ocr(crop, cls=True)
    for res in result:
        if not res:
            continue
        for line in res:
            text, _conf = line[1]
            return text
    return None


def predict(img_path, ocr, yolo_model):
    """Detect the two licence-plate rows in an image and OCR them.

    Runs the YOLO model over the image at *img_path*; detections of class 0
    are treated as row 1 of the plate and class 1 as row 2 (each row keeps
    the first text recognized for it).

    Returns "<row1> <row2>" when both rows were read, otherwise the string
    "Image Not Clear".

    Fixes over the original: the two OCR branches (class 0 / class 1) were
    duplicated verbatim and are now one helper; the unused text_x/text_y/
    text_conf locals and debug prints are gone; the crop arithmetic
    (abs width/height re-added to the corner) is simplified to direct
    slicing, which is equivalent for the top-left/bottom-right boxes YOLO
    returns; OCR is skipped for a row that has already been read.
    """
    img = cv2.imread(img_path)
    results = yolo_model.predict(img, iou=0.7)
    r1_txt = None
    r2_txt = None
    for r in results:
        for box in r.boxes:
            # Box corners in (x1, y1, x2, y2) pixel coordinates.
            x1, y1, x2, y2 = (int(v) for v in box.xyxy[0])
            cv2.rectangle(img, (x1, y1), (x2, y2), (255, 0, 0), 1)
            crop = img[y1:y2, x1:x2]
            cls_id = int(box.cls)
            if cls_id == 0 and r1_txt is None:
                r1_txt = _first_ocr_text(ocr, crop)
            elif cls_id == 1 and r2_txt is None:
                r2_txt = _first_ocr_text(ocr, crop)
    if r1_txt is not None and r2_txt is not None:
        return f"{r1_txt} {r2_txt}"
    return "Image Not Clear"
from fastapi import APIRouter
from fastapi import File
import cv2
import numpy as np
import io
from scripts.core.handler.Text_Extraction_Handler import load_ocr_model,load_yolo_model, predict
from scripts.logging.application_logging import logger
from scripts.config.app_constants import TEXT_EXTRACT
# Load both models once at import time so every request reuses the same
# OCR reader and YOLO detector instead of reloading weights per call.
ocr = load_ocr_model()
yolo_model = load_yolo_model()
# Router mounted by app.py; routes below attach to it.
router = APIRouter()
@router.post(TEXT_EXTRACT.text)
def extract_license_number(file: bytes = File(...)):
    """Accept an uploaded image as raw bytes, run plate detection + OCR,
    and return the extracted licence-plate text.

    Returns "Image Not Clear" when no plate could be read, and an explicit
    error message when decoding or inference fails (previously the handler
    swallowed the exception and returned None).
    """
    try:
        # Decode the raw upload into a BGR image (io.BytesIO round-trip
        # removed — the bytes are already in memory).
        buffer = np.asarray(bytearray(file), dtype="uint8")
        image = cv2.imdecode(buffer, cv2.IMREAD_COLOR)
        # predict() expects a path on disk, so persist the frame first.
        image_path = "test.jpg"
        cv2.imwrite(image_path, image)
        text = predict(image_path, ocr, yolo_model)
        logger.info("text detected")
        return text
    except Exception as e:
        # logger.exception keeps the traceback in the logs.
        logger.exception("Error is :{}".format(e))
        return "Text extraction failed"
import logging.handlers
import os
import sys
import time
from logging import StreamHandler
from scripts.config import app_configurations

# Handler names and level come from conf/application.conf ([LOG] section).
LOG_HANDLERS = app_configurations.LOG_HANDLERS
log_level = app_configurations.LOG_LEVEL
# Daily log file name, e.g. logs/structure_20240101.log.  Only relevant when
# the file handler below is re-enabled.
log_file = os.path.join(app_configurations.LOG_FILE_NAME + "_" + time.strftime("%Y%m%d") + '.log')

# Single shared application logger; the rest of the code imports `logger`
# from this module.
logger = logging.getLogger(app_configurations.LOGGER_NAME)
logger.setLevel(log_level)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(filename)s - %(module)s: %(funcName)s: '
                              '%(lineno)d - %(message)s')

if 'console' in LOG_HANDLERS:
    # Adding the log Console handler to the logger
    console_handler = StreamHandler(sys.stdout)
    console_handler.setFormatter(formatter)
    logger.addHandler(console_handler)

# if 'file' in LOG_HANDLERS:
#     # Adding the log file handler to the logger
#     file_handler = logging.FileHandler(log_file)
#     file_handler.setFormatter(formatter)
#     logger.addHandler(file_handler)
\ No newline at end of file
test.jpg

550 KB

Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment