Commit 7522b7c6 authored by tarun2512

first commit
#Ignore the logs directory
logs/
#Ignoring the password file
passwords.txt
#Ignoring git and cache folders
.git
.cache
.gitignore
.gitlab-ci.yml
variables.yml
#Ignoring all the markdown and class files
*.md
**/*.class
.env
__pycache__
*.pyc
*.pyo
*.pyd
.Python
.env
pip-log.txt
pip-delete-this-directory.txt
.tox
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.log
# Created by .ignore support plugin (hsz.mobi)
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
reports/*.pdf
reports/*.csv
reports/*.xlsx
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
.idea
logs
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
### VisualStudio template
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
data/*
### JupyterNotebooks template
# gitignore template for Jupyter Notebooks
# website: http://jupyter.org/
*/.ipynb_checkpoints/*
# IPython
# Remove previous ipynb_checkpoints
# git rm -r .ipynb_checkpoints/
.env
data
.temp
#!/usr/bin/env bash
pip install ruff black --upgrade
ruff scripts
black scripts --check
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.4.0
    hooks:
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: requirements-txt-fixer
  - repo: https://github.com/asottile/pyupgrade
    rev: v3.8.0
    hooks:
      - id: pyupgrade
        args:
          - --py3-plus
          - --keep-runtime-typing
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    rev: v0.0.275
    hooks:
      - id: ruff
        args:
          - --fix
  - repo: https://github.com/psf/black
    rev: 23.3.0
    hooks:
      - id: black
        # It is recommended to specify the latest version of Python
        # supported by your project here, or alternatively use
        # pre-commit's default_language_version, see
        # https://pre-commit.com/#top_level-default_language_version
        language_version: python3.10
# iLens - DevOps
### GitFlow
Below is the list of branches for which CI/CD is configured.
### Branches
| Branch | Description | URL |
| --------- | ------------------------------------------------------------------------- | -------------------------------------------------------------- |
| `master` | Tags and releases the new version. |-|
| `QA` | Deploys to the non-production testing environment - AKS. | https://qa.ilens.io/ |
| `develop` | Deploys to the 220 server and the dev Kubernetes cluster. | http://192.168.0.220/dev_master/ and http://192.168.0.236/dev_master/ |
| `feature/<feature_name>` | Holds the code base for a feature update. |-|
| `patch/<patch_name>` | Holds the code base for a patch update. |-|
- The development team has two environments: a self-hosted Kubernetes cluster - http://192.168.0.236/dev_master/ - and a server where the source code is deployed directly - http://192.168.0.220/dev_master/.
- The QA environment is an AKS cluster - https://qa.ilens.io/
- Production environments are all client environments.
### Merge Requests
1. When a merge request targeting the `develop` or `QA` branch is merged, a pipeline is triggered to deploy to the corresponding environment.
1. For a feature update, create a new branch named **`feature/<feature_name>`** from the `master` branch. Once development is complete, merge the code back to `master`; auto-tagging runs on the merge.
1. For a patch update, create a new branch named **`patch/<patch_name>`** from the `master` branch. Once development is complete, merge the code back to `master`; auto-tagging runs on the merge.
The same is depicted in the below diagram:
<img src="https://gitlab-pm.knowledgelens.com/KnowledgeLens/Products/iLens-2.0/core/devops/scripts/-/raw/auto-tagging-ci/Patch-Feature-Flow.png" alt="Merge request"/>
## Stage 1
FROM python:3.10-slim-bullseye AS builder
WORKDIR /code
ARG PIP_EXTRA_INDEX_VALUE
ENV UV_EXTRA_INDEX_URL=$PIP_EXTRA_INDEX_VALUE
COPY requirements.txt /code/requirements.txt
RUN pip install uv && uv venv && uv pip install -r requirements.txt && rm requirements.txt
COPY scripts/ /code/scripts
COPY app.py __version__.py main.py pyproject.toml /code/
## Stage 2
FROM azrilensprod.azurecr.io/ftdm/base-images/python:python-3.10.14
RUN groupadd --gid 2000 nonroot && useradd --uid 1000 --gid 2000 -m nonroot
WORKDIR /code
COPY --from=builder /code /code
ENV PATH="/code/.venv/bin:$PATH"
USER nonroot
CMD [ "python", "app.py" ]
# global-catalog
Release Note:
- version: v1

Features:
- First Catalog Version
__version__ = "V1"
from scripts.config import Service

if __name__ == "__main__":
    from dotenv import load_dotenv

    load_dotenv()

import argparse
import gc

import uvicorn

from main import app
from scripts.logging import logger

gc.collect()
ap = argparse.ArgumentParser()
if __name__ == "__main__":
    app.root_path = "/ftdm-catalog"
    ap.add_argument(
        "--port",
        "-p",
        required=False,
        default=Service.PORT,
        help="Port to start the application.",
    )
    ap.add_argument(
        "--bind",
        "-b",
        required=False,
        default=Service.HOST,
        help="IP to start the application.",
    )
    arguments = vars(ap.parse_args())
    logger.info(f"App Starting at {arguments['bind']}:{arguments['port']}")
    uvicorn.run("main:app", host=arguments["bind"], port=int(arguments["port"]))
-----BEGIN RSA PRIVATE KEY-----
MIICWwIBAAKBgQClilTaeHq6Zc+kWHCNl1O0btGRm7ct3O5zqWx1mwwLUWH14eft
Hi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULfENhwd/D7P3mnoRlktPT2t+tt
RRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw2hcqOYe/NGTkmm1PswIDAQAB
AoGAZPARR1l5NBkKYGKQ1rU0E+wSmx+AtVVmjF39RUSyNmB8Q+poebwSgsr58IKt
T6Yq6Tjyl0UAZTGmferCK0xJJrqyP0hMn4nNNut+acWMKyt+9YrA2FO+r5Jb9JuT
SK35xXnM4aZLGppgWJxRzctpIz+qkf6oLRSZme0AuiqcwYECQQDY+QDL3wbWplRW
bze0DsZRMkDAkNY5OCydvjte4SR/mmAzsrpNrS5NztWbaaQrefoPbsdYBPbd8rS7
C/s/0L1zAkEAw1EC5zt2STuhkcKLa/tL+bk8WHHHtf19aC9kBj1TvWBFh+JojWCo
86iK5fLcHzhyQx5Qi3E9LG2HvOWhS1iUwQJAKbEHHyWW2c4SLJ2oVXf1UYrXeGkc
UNhjclgobl3StpZCYAy60cwyNo9E6l0NR7FjhG2j7lzd1t4ZLkvqFmQU0wJATLPe
yQIwBLh3Te+xoxlQD+Tvzuf3/v9qpWSfClhBL4jEJYYDeynvj6iry3whd91J+hPI
m8o/tNfay5L+UcGawQJAAtbqQc7qidFq+KQYLnv5gPRYlX/vNM+sWstUAqvWdMze
JYUoTHKgiXnSZ4mizI6/ovsBOMJTb6o1OJCKQtYylw==
-----END RSA PRIVATE KEY-----
-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQClilTaeHq6Zc+kWHCNl1O0btGR
m7ct3O5zqWx1mwwLUWH14eftHi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULf
ENhwd/D7P3mnoRlktPT2t+ttRRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw
2hcqOYe/NGTkmm1PswIDAQAB
-----END PUBLIC KEY-----
{
  "project_id": "project_139",
  "task_id": "task_10625",
  "triggers": {
    "dateTime": "2023-09-14T11:49:26+0530"
  },
  "tz": "Asia/Calcutta",
  "advanced_configuration": {
    "date": null,
    "date_range": {
      "from_date": null,
      "to_date": null
    },
    "monthly": null,
    "year": null,
    "date_type": null
  },
  "stage_id": "54vATp9W58kL39kZpRn9Tt",
  "project_type": "customer",
  "language": "en"
}
{
  "type": "save",
  "tz": "Asia/Calcutta",
  "project_id": "project_139",
  "stage_id": "54vATp9W58kL39kZpRn9Tt",
  "current_status": "IN PROGRESS",
  "task_id": "task_10625",
  "triggers": {
    "dateTime": "2023-09-14T11:49:26+0530"
  },
  "submitted_data": {
    "data": {
      "textField": "",
      "dg": "",
      "time6": "02:00:00",
      "fic_323_2_00_am": "a",
      "li_352_2_00_am": ""
    }
  },
  "stages": [
    "54vATp9W58kL39kZpRn9Tt"
  ],
  "project_type": "customer",
  "language": "en"
}
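# A minimal sketch (not part of the service) of how the two scheduler payloads
# above could be validated with pydantic v2, which this repo pins. The model
# names below are hypothetical, chosen only to mirror the JSON fields shown.
from typing import Optional

from pydantic import BaseModel


class Triggers(BaseModel):
    dateTime: str


class TaskTriggerPayload(BaseModel):
    project_id: str
    task_id: str
    triggers: Triggers
    tz: str
    stage_id: str
    project_type: str
    language: str = "en"
    type: Optional[str] = None  # present only in the "save" payload
    advanced_configuration: Optional[dict] = None
    submitted_data: Optional[dict] = None


payload = TaskTriggerPayload.model_validate(
    {
        "project_id": "project_139",
        "task_id": "task_10625",
        "triggers": {"dateTime": "2023-09-14T11:49:26+0530"},
        "tz": "Asia/Calcutta",
        "stage_id": "54vATp9W58kL39kZpRn9Tt",
        "project_type": "customer",
        "language": "en",
    }
)
print(payload.task_id)  # -> task_10625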
APP_NAME=global_catalog
MONGO_URI=mongodb://ilens:ilens4321@infra-mongodb:27017/?authSource=admin
SECURITY_IP_CHECK=false
SECURITY_USER_CHECK=true
SECURITY_AGENT_CHECK=true
LOG_LEVEL=INFO
LOG_TRACEBACK=true
BASE_PATH=/code/data
MOUNT_DIR=/global-catalog
REDIS_URI=redis://infra-redis:6379
#MQTT
MQTT_HOST=infra-mqtt
MQTT_PORT=1883
MQTT_AUTH=
MQTT_USERNAME=
MQTT_PASSWORD=
SW_DOCS_URL=/docs
SW_OPENAPI_URL=/openapi.json
ENABLE_CORS=False
CORS_URLS=staging.ilens.io
SECURE_COOKIE=False
INTERNAL_DCP_URL=http://device-control-plane:8558
INTERNAL_DIGITAL_TWIN_URL=http://digital-twin-service:5555
INTERNAL_META_SERVICES_URL=http://metadata-services:8989
INTERNAL_VISUALIZATION_SERVICES_URL=http://visualization4:1112
INTERNAL_GLOBAL_CATALOG_URL=http://global-catalog:5001
INTERNAL_HIERARCHY_SERVICES_URL=http://hierarchy-services:7008
GLOBAL_CATALOG_PROXY_GC=/global_catalog_gc
SELF_PROXY=https://dev.unifytwin.com
GLOBAL_CATALOG_SERVICES=https://dev.unifytwin.com
GLOBAL_CATALOG_USER=user_097
GLOBAL_CATALOG_PROJECT_ID=project_252
#GLOBAL_CATALOG_PROJECT_ID=project_171
DIGITAL_TWIN_IMAGES_PATH=/code/data/digital_twin_services/images/asset_model
VERIFY_SIGNATURE=False
GC_BEARER_TOKEN=Z2xvYmFsLWNhdGFsb2ctbG9naW4tdG9rZW4=
REQUEST_TIMEOUT=30
DIGEST_USER=AllGoodNamesRGone
DIGEST_PW=comBRANSeANtamasEbICaPeC
EMAIL_SERVICE_PROXY=https://cloud.ilens.io/sms-util
GIT_URL="https://gitlab-pm.knowledgelens.com/api/v4/projects"
GIT_BRANCH="master"
GIT_USERNAME="tarun.madamanchi"
GIT_ACCESS_TOKEN="xYVrEZdLqbvpVW9SW3fm"
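# A small illustration of how the variables above are typically consumed:
# python-dotenv loads the file into the process environment, after which
# os.environ (or the pydantic settings classes in scripts/config.py) reads
# them. Illustrative only; the fallback values here are placeholders.
import os

from dotenv import load_dotenv

load_dotenv()  # reads .env from the working directory, if present
mongo_uri = os.environ.get("MONGO_URI", "mongodb://localhost:27017")
enable_cors = os.environ.get("ENABLE_CORS", "False").lower() == "true"
print(mongo_uri, enable_cors)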
import gc
import os

from apscheduler.schedulers.asyncio import AsyncIOScheduler
from dotenv import load_dotenv
from pytz import utc
from ut_security_util import FastAPIConfig, generate_fastapi_app

from scripts.background import fetch_and_update_gc_login_token
from scripts.config import Service
from scripts.constants import AppSpec
from scripts.logging import logger
from scripts.services import router

load_dotenv()
secure_access = os.environ.get("SECURE_ACCESS", default=False)
gc.collect()
Schedule = None
app_config = FastAPIConfig(
    title=AppSpec.name,
    version="V1",
    description=AppSpec.description,
    root_path="/catalog-management",
    redoc_url="/redoc",
)
app = generate_fastapi_app(
    app_config=app_config,
    routers=[router],
    project_name=Service.APP_NAME,
    enable_default_openapi=True,
    disable_operation_default=True,
)


@app.on_event("startup")
async def startup_event():
    global Schedule
    try:
        Schedule = AsyncIOScheduler()
        Schedule.configure(timezone=utc)
        fetch_and_update_gc_login_token()
        Schedule.add_job(fetch_and_update_gc_login_token, trigger="interval", minutes=30, misfire_grace_time=180)
        Schedule.start()
        logger.info("Created a scheduled job to fetch and update the global catalog login token every 30 minutes.")
    except Exception as e:
        logger.error(e)
        logger.error("Unable to create a scheduled job for fetching and updating the global catalog login token.")
[global]
extra-index-url = http://192.168.0.207:8481/simple/
trusted-host = 192.168.0.207
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.hatch.version]
path = "__version__.py"
[project]
name = "workflow-management2.0"
dynamic = ["version"]
description = "A catalog manager for FTDM."
readme = "README.md"
requires-python = ">=3.10"
authors = [
    { name = "Tarun Madamanchi", email = "tarun.madamanchi@rockellautomation.com" },
]
classifiers = [
    "Development Status :: 4 - Beta",
    "Environment :: Web Environment",
    "Intended Audience :: Developers",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: Implementation :: PyPy",
]

[tool.black]
line-length = 120
target-version = ['py310']

[tool.ruff]
select = [
    "E",  # pycodestyle errors
    "W",  # pycodestyle warnings
    "F",  # pyflakes
    "I",  # isort
    "C",  # flake8-comprehensions
    "B",  # flake8-bugbear
]
ignore = [
    "E501",  # line too long, handled by black
    "B008",  # do not perform function calls in argument defaults
    "C901",  # too complex
    "E402",  # module level import not at top of file
    "B904",  # raise without from inside except
    "B905",  # zip() without an explicit strict=
    "B009",  # do not call getattr with a constant attribute value
    "C417",  # unnecessary map() usage
]

[tool.ruff.per-file-ignores]
"__init__.py" = ["F401"]

[tool.coverage.report]
precision = 2
fail_under = 50
show_missing = true
skip_covered = true
exclude_lines = [
    "pragma: no cover",
    "pragma: nocover",
    "if TYPE_CHECKING:",
    "if typing.TYPE_CHECKING:",
    "raise NotImplementedError",
]
APScheduler==3.10.4
bcrypt~=4.0.1
click==7.1.2
croniter==0.3.34
cryptography==41.0.4
fastapi>=0.115.2
faust==1.10.4
httpx~=0.25.0
paho-mqtt==2.0.0
pillow==10.3.0
pycryptodome~=3.19.1
pydantic~=2.7.3
python-dotenv==1.0.1
pytz==2024.1
shortuuid==1.0.12
ut-mongo-util[stable,encryption]==1.1.1
ut-redis-connector[stable]==0.3.1
ut-security-util[stable,encryption]==1.2.12
uvicorn[standard]>=0.23.0
venusian==1.2.0
import json

import httpx

from scripts.config import Service
from scripts.logging import logger


def fetch_and_update_gc_login_token():
    headers = {"auth-token": Service.GC_BEARER_TOKEN}
    params = {
        "project_id": Service.GLOBAL_CATALOG_PROJECT_ID,
        "user_id": Service.GLOBAL_CATALOG_USER,
        "age": 60,
    }
    global_workflow_data_url = f"{Service.GLOBAL_CATALOG_SERVICES}/ilens_api/ilens_config/gc/login"
    logger.info(f"gc_login_url: {global_workflow_data_url}")
    with httpx.Client() as client:
        login_token = client.get(url=global_workflow_data_url, params=params, headers=headers, timeout=Service.TIMEOUT)
        if login_token.status_code == 200:
            logger.info(f"login_token: {json.loads(login_token.text)}")
            login_token = json.loads(login_token.text)
            logger.info("Adding login token to app configuration")
            Service.GLOBAL_CATALOG_TOKEN = login_token
        else:
            logger.error(f"Internal Server Error: {login_token.status_code}")
import os
import pathlib
import shutil
import sys
from typing import Annotated, Any, Optional

from dotenv import load_dotenv
from pydantic.functional_validators import BeforeValidator
from pydantic.v1 import BaseSettings, Field, root_validator

load_dotenv()
PROJECT_NAME = "ftdm-catalog"


def options_decoder(v):
    # Split comma-separated environment strings into lists before validation.
    if isinstance(v, str):
        return v.split(",")
    return v


OptionsType = Annotated[Any, BeforeValidator(options_decoder)]


class _Service(BaseSettings):
    MODULE_NAME: str = Field(default="Catalog-Management")
    APP_NAME: str = Field(default="catalog-management")
    HOST: str = Field(default="0.0.0.0")
    PORT: int = Field(default=45561)
    LOG_LEVEL: str = Field(default="INFO")
    ENABLE_FILE_LOG: Optional[Any] = False
    ENABLE_CONSOLE_LOG: Optional[Any] = True
    GLOBAL_CATALOG_SERVICES: str = Field(default="")
    GLOBAL_CATALOG_PROJECT_ID: str = Field(default="")
    GLOBAL_CATALOG_USER: str = Field(default="")
    GLOBAL_CATALOG_TOKEN: str = Field(default="")
    GC_BEARER_TOKEN: str = Field(default="")
    TIMEOUT: int = Field(default=60)
    REFRESH_TOKEN_DURATION: int = Field(default=168)
    COOKIE_MAX_AGE_IN_MINS: int = Field(default=60)

    @root_validator(allow_reuse=True)
    def validate_values(cls, values):
        values["LOG_LEVEL"] = values["LOG_LEVEL"] or "INFO"
        print(f"Logging Level set to: {values['LOG_LEVEL']}")
        return values


class _StoragePaths(BaseSettings):
    MODULE_NAME: str = "catalog-v2"
    BASE_PATH: str
    REPORT_PATH: str = Field(None)

    @root_validator(allow_reuse=True)
    def assign_values(cls, values):
        values["BASE_PATH"] = os.path.join("data", values.get("MODULE_NAME"))
        if not values["BASE_PATH"]:
            print("Error, environment variable BASE_PATH not set")
            sys.exit(1)
        values["REPORT_PATH"] = os.path.join(values.get("BASE_PATH"), "reports")
        return values


class _PathToStorage(BaseSettings):
    BASE_PATH: pathlib.Path = Field(None, env="BASE_PATH")
    MOUNT_DIR: pathlib.Path = Field(None, env="MOUNT_DIR")
    TEMP_PATH: pathlib.Path = Field(None, env="TEMP_PATH")
    MODULE_PATH: Optional[pathlib.Path]
    CAPTCHA: str = Field(default="captcha")
    CAPTCHA_PATH: Optional[pathlib.Path]

    @root_validator(allow_reuse=True)
    def assign_values(cls, values):
        values["LOGS_MODULE_PATH"] = os.path.join(values.get("BASE_PATH"), "logs", values.get("MOUNT_DIR"))
        values["MODULE_PATH"] = os.path.join(values.get("BASE_PATH"), values.get("MOUNT_DIR"))
        values["CAPTCHA_PATH"] = os.path.join(values.get("BASE_PATH"), values.get("CAPTCHA"))
        return values

    @root_validator(allow_reuse=True)
    def validate_values(cls, values):
        if not values["BASE_PATH"]:
            print("Error, environment variable BASE_PATH not set")
            sys.exit(1)
        if not values["MOUNT_DIR"]:
            print("Error, environment variable MOUNT_DIR not set")
            sys.exit(1)
        return values


class _KeyPath(BaseSettings):
    KEYS_PATH: Optional[pathlib.Path] = Field(default="data/keys")
    PUBLIC: Optional[pathlib.Path]
    PRIVATE: Optional[pathlib.Path]

    @root_validator(allow_reuse=True)
    def assign_values(cls, values):
        # Seed the key pair from the bundled assets if it is not present yet.
        if not os.path.isfile(os.path.join(values.get("KEYS_PATH"), "public")) or not os.path.isfile(
            os.path.join(values.get("KEYS_PATH"), "private")
        ):
            if not os.path.exists(values.get("KEYS_PATH")):
                os.makedirs(values.get("KEYS_PATH"))
            shutil.copy(os.path.join("assets", "keys", "public"), os.path.join(values.get("KEYS_PATH"), "public"))
            shutil.copy(os.path.join("assets", "keys", "private"), os.path.join(values.get("KEYS_PATH"), "private"))
        values["PUBLIC"] = os.path.join(values.get("KEYS_PATH"), "public")
        values["PRIVATE"] = os.path.join(values.get("KEYS_PATH"), "private")
        return values


class _MQTTConf(BaseSettings):
    MQTT_HOST: str
    MQTT_PORT: int
    MQTT_USERNAME: str
    MQTT_PASSWORD: str
    PUBLISH_BASE_TOPIC: str = "ilens/notifications"


class _KeyCloakConf(BaseSettings):
    keycloak_url: Optional[str]
    keycloak_realm: Optional[str]
    keycloak_admin_user: Optional[str]
    keycloak_admin_password: Optional[str]


class _UnifyTwinEmailConf(BaseSettings):
    UT_EMAIL_SERVICE: Optional[str] = Field(default="UT_EMAIL_SERVICE")
    URL: Optional[str]
    DIGEST_USER: Optional[str] = Field(default="DIGEST_USER")
    DIGEST_PW: Optional[str] = Field(default="DIGEST_PW")

    @root_validator(allow_reuse=True)
    def assign_values(cls, values):
        values["URL"] = f"{values.get('UT_EMAIL_SERVICE')}/api/v1/eim/email/send"
        return values


class _RedisConf(BaseSettings):
    REDIS_URI: str
    REDIS_LOGIN_DB: int = Field(default=9)
    REDIS_PROJECT_DB: int = Field(default=18)
    REDIS_USER_ROLE_DB: int = Field(default=21)
    REDIS_LICENSE_CHECK_DB: int = Field(default=70)


class _Security(BaseSettings):
    SECURE_COOKIE: bool = Field(default=True)
    VERIFY_SIGNATURE: bool = Field(default=False)
    PROTECTED_HOSTS: list = os.environ.get("PROTECTED_HOSTS", default="").split(",")
    PASSWORD_DECRYPTION_KEY: str = "QVY1bWdMQ0Zxc"
    # Read the flags from the environment; only explicit "true" values enable them.
    DISABLE_ENC: bool = os.environ.get("DISABLE_ENC", default="False") in ["True", "true", True]
    VALIDATE_LIMIT: bool = os.environ.get("VALIDATE_LIMIT", default="False") in ["True", "true", True]


Service = _Service()
StoragePaths = _StoragePaths()
PathToStorage = _PathToStorage()
KeyCloakConf = _KeyCloakConf()
KeyPath = _KeyPath()
MQTTConf = _MQTTConf()
Security = _Security()
RedisConf = _RedisConf()
UnifyTwinEmailConf = _UnifyTwinEmailConf()

__all__ = [
    "PROJECT_NAME",
    "Service",
    "StoragePaths",
    "PathToStorage",
    "KeyPath",
    "MQTTConf",
    "KeyCloakConf",
    "Security",
    "RedisConf",
    "UnifyTwinEmailConf",
]
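# Quick illustration (not part of the module) of what options_decoder above
# does: the BeforeValidator splits comma-separated strings before validation,
# so an env value like "a,b,c" becomes a list. TypeAdapter is the pydantic v2
# entry point used here purely for demonstration.
from typing import Annotated, Any

from pydantic import TypeAdapter
from pydantic.functional_validators import BeforeValidator


def demo_options_decoder(v):
    if isinstance(v, str):
        return v.split(",")
    return v


DemoOptionsType = Annotated[Any, BeforeValidator(demo_options_decoder)]
print(TypeAdapter(DemoOptionsType).validate_python("qa.ilens.io,staging.ilens.io"))
# -> ['qa.ilens.io', 'staging.ilens.io']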
import os


class AppSpec:
    name = "ftdm-catalog"
    summary = """A module which manages catalog spaces and is responsible for the review and approval of
    items published to the catalog"""
    description = """## Overview
This microservice handles the creation and management of catalog spaces, supporting both private and public spaces.
It facilitates the submission of dashboards, apps, and assets to the catalog, where they await review and approval by
designated approvers in both private and public spaces. Once approved, the content can be published. Additionally,
the service enables cloning of published content across different projects, allowing connection to both private and
public spaces to access and duplicate previously published items.
"""
    STARTING_VALUE_ERROR = "Input Field-Value shouldn't start with a Special Character."
    STRING_VALUE_ERROR = "Input Field consists of an unsupported Special Character."
    STARTING_VALIDATION = set(os.environ.get("STRING_VALIDATION_LIST1", default="=,<,+,!,@").split(","))
    STRING_VALIDATION = set(os.environ.get("STRING_VALIDATION_LIST2", default="<").split(","))
    WEBAPP_CONSTANTS = ["webapp_constants", "app_version"]
class KEYS:
    cookie_encryption_private_key = "#ilenskey@rock1#"
    captcha_cookie_encryption_key = "UnifyTwin@r@ck1$"


class MongoConstants:
    mongo_push_stage = "$push"
    mongo_group_stage = "$group"
    mongo_null_stage = "$ifNull"
    mongo_project_stage = "$project"
    mongo_match_stage = "$match"
    mongo_option_stage = "$options"
    mongo_regex_stage = "$regex"
    access_group_id_mongo = "$access_group_id"
    array_to_object_mongo = "$arrayToObject"


class Secrets:
    LOCK_OUT_TIME_MINS = 30
    leeway_in_mins = 10
    unique_key = "45c37939-0f75"
    token = "8674cd1d-2578-4a62-8ab7-d3ee5f9a"
    issuer = "ilens"
    alg = "RS256"
    SECRET_FOR_SUPPORT_LENS = "WeSupport24X7UnifyTwinX#"
    ISS = "unifytwin"
    AUD = "supportlens"
    signature_key = "kliLensKLiLensKL"
    signature_key_alg = ["HS256"]
class CommonKeys:
    KEY_SHARED_GROUP = "shared_group"
    KEY_SHARED_USER = "shared_user"
    KEY_USER_DETAILS = "user_details"
    KEY_SHARING_INFO = "sharing_info"
    KEY_LAST_UPDATED_TIME = "lastUpdatedTime"
    KEY_LAST_FAILED_ATTEMPT = "last_failed_attempt"
    KEY_USER_ID = "user_id"
    KEY_PROCESS_TEMPLATE = "process_template"
    KEY_SITE_TEMPLATE = "site_template"
    KEY_PROCESS_TEMPLT_ID = "process_templt_id"
    KEY_KEY_LIST = "key_list"
    HIERARCHY_ACCESS_KEY_LIST = "access_key_list"
    KEY_VALUE = "value"
    KEY_SITE_TEMPLT_ID = "site_templt_id"
    KEY_TYPE = "type"
    KEY_PROJECT_ID = "project_id"
    KEY_NAME = "name"
    KEY_CREATED_BY = "created_by"
    KEY_CREATED_ON = "created_on"
    KEY_CUSTOMER_PROJ_ID = "customer_project_id"
    KEY_CONTENT_TYPE = "content_type"
    KEY_UPDATED_BY = "updated_by"
    KEY_UPDATED_ON = "updated_on"
    KEY_MACHINE_BUILDER_PROJECT = "machine_builder"
    KEY_CUSTOMER_PROJECT = "customer"
    KEY_N_LEVEL_HIERARCHY_PROJECT = "n_level_hierarchy"
    KEY_GRAPH_MODEL = "graph_model"
    KEY_MACHINE_BUILDER_LABEL = "Machine Builder Project"
    KEY_CUSTOMER_PROJECT_LABEL = "Customer Project - Fixed Level Hierarchy"
    KEY_N_LEVEL_HIERARCHY_LABEL = "Customer Project - N Level Hierarchy"
    KEY_GATEWAY_TYPE = "line"
    KEY_SENSOR_TYPE = "equipment"
    KEY_CATEGORY_NAME = "categoryName"
    KEY_DASHBOARD_CATEGORY_ID = "dashboard_category_id"
    KEY_CREATE_NEW = "Create New"
    KEY_ACTIVITY_NAME = "Activity Name"
    KEY_ZIP_CODE = "Zip Code"
    KEY_MAKE_MODEL = "Make & Model"
    KEY_MACHINE_ID_EQUIPMENT_CODE = "Machine ID/ Equipment Code"
    KEY_ALREADY_EXIST = "Already Exist"
    KEY_STAGING_URL_OBJECT_DETECTION = "https://staging.ilens.io/ilens-manager/#/p/apps/object-detection"
    KEY_INVENTORY_WAREHOUSE = ("Inventory by Warehouse",)
    KEY_ASIA_KOLKATA = "Asia/Kolkata"
    KEY_REGEX = "$regex"
    KEY_OPTIONS = "$options"
    KEY_TARGET_HEADER_ID = "target_configuration"
    KEY_HEADER_ID = "header_id"
    KEY_LEVEL = "level"
    LEVEL_KEY = "level_key"
    KEY_DESCRIPTION = "description"
    KEY_SUB_CATEGORY_NAME = "sub_category_name"
    TYPE = "type"
    KEY_EMAIL_ID = "email"
    KEY_APPROVED = "approved"
    KEY_REJECT = "rejected"
    KEY_REVIEW_PENDING = "review_pending"
    KEY_ISDEL = "isdeleted"
    STD_TIME_ZONE = "Asia/Kolkata"
    category = "dashboard_category_099"
    under_review_json = {"type": "switch", "key": "under_review", "filterLabel": "Show Review Pending Items only."}
    KEY_PUSH = "$push"
    KEY_SORT = "$sort"
    KEY_ARRAY_ELEMENT = "$arrayElemAt"
    KEY_VALUE_MONGO = "$value"
    KEY_PROJECT = "$project"
    KEY_MATCH = "$match"
    KEY_EXISTS = "$exists"
    KEY_GROUP = "$group"
    KEY_ACCESS_GRP = "access_group"
    KEY_ACCESS_GRP_ID = "access_group_id"
    KEY_ACCESS_GRP_IDS = "access_group_ids"
    KEY_AI_RULE_ID = "rule_id"
    KEY_AI_RULE_INFO = "rule_info"
    KEY_AI_RULE_NAME = "rule_name"
    KEY_CATEGORY = "category"
    KEY_CREATED_TIME = "created_at"
    KEY_COMPLETED_AT = "completed_at"
    KEY_UPDATED_AT = "updated_by"
    KEY_DASHBOARD_ID = "dashboard_id"
    KEY_DASHBOARD_TYPE = "dashboard_type"
    KEY_DATA = "data"
    KEY_ENABLE_DELETE = "enable_delete"
    KEY_ENABLE_EDIT = "enable_edit"
    KEY_FEATURE_ADDITION = "feature_addition"
    KEY_FROM_DATE = "from_date"
    KEY_FROM_TIME = "from_time"
    KEY_ID = "id"
    KEY_IS_DEL = "is_deleted"
    KEY_IS_OWNER = "isOwner"
    KEY_KEYS = "keys"
    KEY_MESSAGE = "message"
    KEY_META = "meta"
    KEY_MONGO_ID = "_id"
    KEY_OWNER = "owner"
    KEY_RECORDS = "records"
    KEY_STARRED = "starred"
    KEY_STATUS = "status"
    KEY_TIME_RANGE = "timeRange"
    KEY_TO_DATE = "to_date"
    KEY_TO_TIME = "to_time"
    KEY_USER = "user"
    KEY_USERS = "users"
    KEY_USER_DETAILS_CC = "userDetails"
    KEY_USER_NAME = "username"
    KEY_USER_TZ = "user_tz"
    KEY_V_DEVICE = "virtual_device_id"
    KEY_WIDGET_DATA = "widget_data"
    KEY_WIDGET_ID = "widget_id"
    KEY_WIDGET_ORDER = "widget_order"
    KEY_WIDGET_TYPE = "widget_type"
    KEY_COMMENTS = "comments"
    KEY_WIDGET_COMMENT = "widget_comment"
    KEY_CHAINED_USER_GROUP_ID = "sharing_info.userGroups.id"


class DateConstants:
    date_format = "%d %b %Y %H:%M"


class ButtonConstants:
    button_text_success = "fa fa-check-circle-o text-success"
    button_text_danger = "fa fa-ban text-danger"
    in_active = "In active"


class CaptchaKeys:
    max_length = 5
class APIEndpoints:
    # Base Proxies
    proxy_space_manager = "/space_manager"
    api_user_role = "/user_roles"
    app_base_url = "/ilens_config"
    # Space Manager APIs
    api_create_private_space = "/create_private_space"
    api_fetch_space_details = "/fetch_space_details"
    api_delete_space = "/delete_space"
    api_spaces_header = "/space_header"
    api_space_list_grid = "/space_list_grid"
    # Common Endpoints
    api_user = "/user"
    api_list = "/list"
    api_create = "/create"
    api_delete = "/delete"
    api_add = "/add"
    api_remove_project = "/remove_project"
    api_validate_delete = "/validate/delete"
    api_disable = "/disable"
    # User
    api_fetch_info = "/fetch_info"
    # Login
    api_login = "/login"
    api_esign = "/esign"
    api_get_token = "/get_token"
    api_logout = "/logout"
    api_aad_login_uri = "/aad/login_uri"
    api_aad_login = "/aad/login"
    api_saml_verify = "/saml/verify"
    api_saml_login = "/saml/login"
    api_saml_logout = "/saml/logout"
    api_saml_test_verify = "/saml/test/verify"
    forgot_password = "/user/forgot_password"
    update_password = "/user/update_password"
    reset_password = "/user/reset_password"
    send_external_image = "/send_external_image"
    check_login = "/login-check"
    verify_mfa = "/verify_mfa"
    oauth_login = "/oauth/login"
    api_saml_esign = "/saml/esign"
    captcha_image = "/get_captcha_image"
    validate_captcha = "/validate_captcha"
    api_project_templates = "/project_templates"


class CommonStatusCode:
    SUCCESS_CODES = (
        200,
        201,
        204,
    )
class IndustryCategoryCollectionKeys:
    KEY_IS_DELETED = "is_deleted"
    KEY_INDUSTRY_CATEGORY_NAME = "industry_category_name"
    KEY_DESCRIPTION = "description"
    KEY_INDUSTRY_CATEGORY_ID = "industry_category_id"
    KEY_UPLOAD_ICON = "upload_icon"


class LookupKeys:
    KEY_ID = "lookup_id"
    KEY_NAME = "lookup_name"


class TagKeys:
    KEY_TAG_ID = "id"
    KEY_TAG_NAME = "tag_name"


class PrivateSpaceKeys:
    KEY_CATALOG_SPACE_ID = "catalog_space_id"
    KEY_CATALOG_SPACE_NAME = "catalog_space_name"


class STATUS:
    SUCCESS = "success"
    FAILED = "failed"
    SUCCESS_CODES = [200, 201]
class CommonKeys:
    KEY_EMAIL_ID = "email"
    KEY_APPROVED = "approved"
    KEY_REJECT = "rejected"
    KEY_REVIEW_PENDING = "review_pending"
    KEY_LAST_FAILED_ATTEMPT = "last_failed_attempt"
    KEY_USER_ID = "user_id"
    KEY_PROCESS_TEMPLATE = "process_template"
    KEY_SITE_TEMPLATE = "site_template"
    KEY_PROCESS_TEMPLT_ID = "process_templt_id"
    KEY_KEY_LIST = "key_list"
    KEY_VALUE = "value"
    KEY_SITE_TEMPLT_ID = "site_templt_id"
    KEY_TYPE = "type"
    KEY_CONTENT_TYPE = "content_type"
    KEY_PROJECT_ID = "project_id"
    KEY_ISDEL = "isdeleted"
    STD_TIME_ZONE = "Asia/Kolkata"
    category = "dashboard_category_099"
    under_review_json = {"type": "switch", "key": "under_review", "filterLabel": "Show Review Pending Items only."}
    KEY_PUSH = "$push"
    KEY_SORT = "$sort"
    KEY_ARRAY_ELEMENT = "$arrayElemAt"
    KEY_NAME = "$name"
    KEY_VALUE_MONGO = "$value"
    KEY_PROJECT = "$project"
    KEY_MATCH = "$match"
    KEY_EXISTS = "$exists"
    KEY_GROUP = "$group"
    KEY_ACCESS_GRP = "access_group"
    KEY_ACCESS_GRP_ID = "access_group_id"
    KEY_ACCESS_GRP_IDS = "access_group_ids"
    KEY_AI_RULE_ID = "rule_id"
    KEY_AI_RULE_INFO = "rule_info"
    KEY_AI_RULE_NAME = "rule_name"
    KEY_CATEGORY = "category"
    KEY_CATEGORY_NAME = "categoryName"
    KEY_CREATED_BY = "created_by"
    KEY_CREATED_TIME = "created_at"
    KEY_COMPLETED_AT = "completed_at"
    KEY_UPDATED_AT = "updated_by"
    KEY_LAST_UPDATED_TIME = "updated_at"
    KEY_DASHBOARD_CATEGORY_ID = "dashboard_category_id"
    KEY_DASHBOARD_ID = "dashboard_id"
    KEY_DASHBOARD_TYPE = "dashboard_type"
    KEY_DATA = "data"
    KEY_ENABLE_DELETE = "enable_delete"
    KEY_ENABLE_EDIT = "enable_edit"
    KEY_FEATURE_ADDITION = "feature_addition"
    KEY_FROM_DATE = "from_date"
    KEY_FROM_TIME = "from_time"
    KEY_ID = "id"
    KEY_IS_DEL = "is_deleted"
    KEY_IS_OWNER = "isOwner"
    KEY_KEYS = "keys"
    KEY_MESSAGE = "message"
    KEY_META = "meta"
    KEY_MONGO_ID = "_id"
    KEY_OWNER = "owner"
    KEY_RECORDS = "records"
    KEY_SHARED_GROUP = "shared_group"
    KEY_SHARED_USER = "shared_user"
    KEY_SHARING_INFO = "sharing_info"
    KEY_STARRED = "starred"
    KEY_STATUS = "status"
    KEY_TIME_RANGE = "timeRange"
    KEY_TO_DATE = "to_date"
    KEY_TO_TIME = "to_time"
    KEY_USER = "user"
    KEY_USERS = "users"
    KEY_USER_DETAILS = "user_details"
    KEY_USER_DETAILS_CC = "userDetails"
    KEY_USER_NAME = "username"
    KEY_USER_TZ = "user_tz"
    KEY_V_DEVICE = "virtual_device_id"
    KEY_WIDGET_DATA = "widget_data"
    KEY_WIDGET_ID = "widget_id"
    KEY_WIDGET_ORDER = "widget_order"
    KEY_WIDGET_TYPE = "widget_type"
    KEY_COMMENTS = "comments"
    KEY_WIDGET_COMMENT = "widget_comment"
    KEY_CHAINED_USER_GROUP_ID = "sharing_info.userGroups.id"
    KEY_OPTIONS = "$options"
    KEY_REGEX = "$regex"
class AssetDetailsKeys:
    KEY_ASSET_MODEL_ID = "asset_model_id"
    KEY_ASSET_VERSION = "asset_version"


class DefaultResponseMessages:
    USER_UNAUTHORIZED = "User Unauthorized"
    VALUE_SUCCESS = "success"
    VALUE_FAILED = "failed"
    VALUE_UNDEFINED = "undefined"


class DefaultResponseJson:
    FAILED_RESPONSE = {
        "status": DefaultResponseMessages.VALUE_FAILED,
        "message": DefaultResponseMessages.VALUE_FAILED,
        "data": {},
    }
    SUCCESS_RESPONSE = {
        "status": DefaultResponseMessages.VALUE_SUCCESS,
        "message": DefaultResponseMessages.VALUE_SUCCESS,
        "data": [],
    }
    TABLE_FAILED_RESPONSE = {
        "status": "failed",
        "message": "failed",
        "data": {"tableData": {"headerContent": [], "bodyContent": []}},
    }


class Secrets:
    LOCK_OUT_TIME_MINS = 30
    leeway_in_mins = 10
    unique_key = "45c37939-0f75"
    token = "8674cd1d-2578-4a62-8ab7-d3ee5f9a"
    issuer = "ilens"
    alg = "RS256"
    signature_key = "kliLensKLiLensKL"
    signature_key_alg = ["HS256"]
class CollectionNames:
    pipeline_info = "pipeline_info"
    pipeline_instance = "pipeline_instance"
    private_catalog_space = "private_catalog_space"
    pipeline_category = "pipeline_category"
    rule_definition = "rule_definition"
    materials = "materials"
    customer_apps = "customer_apps"
    lookup_table = "lookup_table"
    customer_apps_catalog = "customer_apps_catalog"
    category_apps = "category_apps"
    custom_node = "custom_node"
    alarm_configuration = "alarm_configuration"
    thing_model_events = "thing_model_events"
    tags_v2 = "tags_v2"
    thing_model_list = "thing_model_list"
    thing_model_type = "thing_model_types"
    thing_model_details = "thing_model_details"
    asset_model_details = "asset_model_details"
    asset_model_rule_engine = "asset_model_rule_engine"
    asset_model_list = "asset_model_list"
    resource_folder_details = "resource_folder_details"
    industry_category = "industry_category"
    tags = "tags"
    tag_groups = "tag_groups"
    rule_engine = "rule_engine"
    tag_category = "tag_category"
    units = "units"
    email_gateway = "email_gateway"
    unit_conversion = "unit_conversion"
    unit_group = "unit_group"
    process_conf = "process_conf"
    constants = "constants"
    unique_id = "unique_id"
    shifts = "shifts"
    site_conf = "site_conf"
    rule_configuration = "rule_engine"
    user = "user"
    user_recent = "user_recent"
    user_project = "user_project"
    user_space = "user_space"
    customer_projects = "customer_projects"
    customer_space = "customer_space"
    thresholds = "thresholds"
    device_model = "device_model"
    ai_models = "ai_models"
    protocol_list = "protocol_list"
    header = "header"
    access_group = "access_group"
    user_role = "user_role"
    unique_id_collection = "unique_id"
    metadata_db = "ilens_metadata"
    ilens_configuration_db = "ilens_configuration"
    device_instance = "device_instance"
    ilens_hmi_db = "ilens_hmi"
    scada_folder_details = "scada_folder_details"
    svg_file_details = "svg_file_details"
    live_tags = "live_tags"
    sld = "sld_new"
    customer_logos = "customer_logos"
    job_list = "job_list"
    lookup = "lookup_table"
    template_dashboard = "template_dashboard"
    dashboard_catalog = "dashboard_catalog"
    collection_widget = "widget"
    collection_dashboard = "dashboard"
    category = "category"
    steps = "steps"
    global_steps = "global_steps"
    step_category = "step_category"
    workflows = "workflows"
    workflow_permissions = "workflow_permissions"
    triggers = "triggers"
    logbook = "logbook"
    logbook_lookups = "logbook_lookups"
    plugin_meta = "plugin_meta"
    temp_parameters = "temp_parameters"
class DBConstants:
    # Collections
    collection_job_list = "job_list"
    collection_unique_id = "unique_id"
    collection_periodic_jobs = "periodic_job_runs"
    collection_workflow_instance = "workflow_instances"
    collection_logbook = "logbook"
    collection_constants = "constants"
    collection_report_templates = "report_templates"
    collection_user = "user"
    collection_task_instance_data = "task_instance_data"
    collection_task_instances = "task_instances"
    collection_periodic_data = "periodic_data"
    collection_steps = "steps"
    user_info_history = "user_info_history"
    collection_workflows = "workflows"
    collection_reports = "define_reports"
    ilens_configuration = "ilens_configuration"
    user_role = "user_role"


class DatabaseNames:
    ilens_configuration = "ilens_configuration"
    ilens_thing_model = "ilens_thing_model"
    ilens_hmi = "ilens_hmi"
    ilens_asset_model = "ilens_asset_model"
    ilens_widget = "ilens_widget"
    global_catalog = "global_catalog"
    ilens_assistant = "ilens_assistant"
    plugins = "plugins"
class CustomerProjectKeys:
    KEY_CUSTOMER_PROJECT_ID = "customer_project_id"
    KEY_CUSTOMER_PROJECT_NAME = "customer_project_name"


class AggregationKeys:
    match = "$match"
    meta = "$meta"
    unwind = "$unwind"
    data = "$data"
    date = "$date"
    group = "$group"
    push = "$push"
    sum = "$sum"
    exists = "$exists"
    cond = "$cond"
    regex = "$regex"
    remove = "$$REMOVE"
    root = "$$ROOT"
    tostring = "$toString"
    ifnull = "$ifNull"
    limit = "$limit"
    site_id = "$site_id"
    concat = "$concat"
    count = "$count"
    expr = "$expr"
    eq = "$eq"
    agg_and = "$and"
    replace_root = "$replaceRoot"
    literal = "$literal"
    sort = "$sort"
    first = "$first"
    options = "$options"
    user_role_id = "$user_role_id"
    user_role_name = "$user_role_name"
    user_id = "$user_id"
    username = "$username"
    project = "$project"
    project_id = "$project_id"
    logbook_id = "$logbook_id"
    logbook_version = "$logbook_version"
    logbook_version_data = "$$logbook_version"
    step_version = "$step_version"
    status = "$status"
    logbook_name = "$logbook_name"
    workflow_id = "$workflow_id"
    addfields = "$addFields"
    workflow_version = "$workflow_version"
    workflow_name = "$workflow_name"
    lookup = "$lookup"
    create_step_id = "$create_step_id"
    current_status = "$current_status"
    associated_workflow_id = "$associated_workflow_id"
    associated_workflow_version = "$associated_workflow_version"
    meta_created_at = "$meta.created_at"
    meta_created_by = "$meta.created_by"
    arrayelemat = "$arrayElemAt"
    arraytoobject = "$arrayToObject"
    step_data_category = "$step_data.step_category"
    user_username = "$user.username"
    step_data = "$step_data"
    step_name = "$step_name"
    step_id = "$step_id"
    meta_createdat = "meta.created_at"
    fullpath = "$full_path"
    name = "$name"
    merge_objects = "$mergeObjects"
    version_comments = "$version_comments"
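# Tiny illustration (not part of the module) of how the operator constants
# above are used: they let pipeline stages be assembled without scattering
# "$"-prefixed string literals through the code.
_stage = {AggregationKeys.match: {"project_id": "project_139", "is_deleted": {AggregationKeys.exists: False}}}
print(_stage)  # -> {'$match': {'project_id': 'project_139', 'is_deleted': {'$exists': False}}}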
class WorkflowPermissionsKeys:
    KEY_WORKFLOW_STATUS = "workflow_status"
    KEY_STEP_ID = "step_id"
    KEY_WORKFLOW_ID = "workflow_id"
    KEY_WORKFLOW_VERSION = "workflow_version"
    KEY_USER_ROLE = "user_role"
    KEY_PERMISSIONS = "permissions"
    KEY_SEQUENCE_NO = "sequence_no"


class TriggerKeys:
    KEY_TRIGGER_ID = "trigger_id"
    KEY_TRIGGER_TYPE = "trigger_type"
    KEY_ROLE = "role"


class WorkflowKeys:
    KEY_WORKFLOW_ID = "workflow_id"
    KEY_WORKFLOW_VERSION = "workflow_version"
    KEY_PROJECT_ID = "project_id"
    KEY_WORKFLOW_NAME = "workflow_name"


class StepsKeys:
    KEY_PROJECT_ID = "project_id"


class StepRecordKeys:
    KEY_STEP_ID = "step_id"
    KEY_PROJECT_ID = "project_id"
    KEY_STEP_NAME = "step_name"
class User:
    user_project_keys = [
        "project_id",
        "AccessLevel",
        "userrole",
        "access_group_ids",
        "landing_page",
        "is_app_user",
        "product_access",
        "app_url",
        "location",
        "department",
        "section",
        "access_level_list",
    ]


class DefaultMessage:
    duplicate_email = "Duplicate email configured"
    duplicate_profile_name = "Duplicate profile name configured"
    email_gateway_update_success = "Email gateway updated successfully"
    email_gateway_create_success = "Email gateway created successfully"
    sms_gateway_id_missing = "SMS gateway id is missing in input"
    all_users_defination = "Inside the list all users definition "
    assets_fetched = "Successfully fetched assets"
    material_update_success = "Material details updated successfully"
class SpaceHeaderContent:
    space_table_actions_action_data = [
        {
            "type": "edit",
            "tooltip": "Edit Task",
            "action": "edit",
            "class": "ra-sm-edit",
        },
        {"action": "delete", "tooltip": "Delete", "type": "delete", "class": "ra-sm-delete"},
        {"action": "view", "tooltip": "View", "type": "view", "class": "ra-sm-viewable"},
    ]
    space_list_header_content = [
        {
            "field": "catalog_space_name",
            "headerName": "Catalog Space",
            "inLineStyle": {"width": "100px", "min-width": "100px"},
            "enable_column_search": True,
            "header_type": "text",
            "filterParams": {"suppressAndOrCondition": True},
        },
        {
            "field": "catalog_space_description",
            "headerName": "Catalog Space Description",
            "inLineStyle": {"width": "100px", "min-width": "150px"},
            "enable_column_search": True,
            "header_type": "text",
            "floatingFilter": False,
            "filterParams": {"suppressAndOrCondition": True},
        },
        {
            "field": "associated_projects",
            "headerName": "Associated to Project(s)",
            "inLineStyle": {"width": "100px", "min-width": "150px"},
            "enable_column_search": True,
            "header_type": "text",
            "floatingFilter": False,
            "filterParams": {"suppressAndOrCondition": True},
        },
        {
            "field": "created_on",
            "headerName": "Created On",
            "inLineStyle": {"width": "100px", "min-width": "150px"},
            "enable_column_search": True,
            "header_type": "date_range",
            "filter": "agDateColumnFilter",
            "filterParams": {"suppressAndOrCondition": True},
        },
    ]
class Account:
    unplanned_id = "notification_01"
    planned_id = "notification_02"
    email_preferences = "email_preferences"
    user_action_json = {
        "addOptionalFields": {
            "enableAdd": False,
            "modalInfo": {"config": "profile", "modalTitle": "Profile Information"},
        }
    }
    default_notification_data = [
        {
            "heading": "Unplanned events",
            "description": "Receive emails about issues that can cause an outage",
            "notification_id": unplanned_id,
            "value": False,
        },
        {
            "heading": "Planned events",
            "description": "Receive emails about maintenance that is required to "
            "keep the platform operating at optimal status",
            "notification_id": planned_id,
            "value": False,
        },
    ]
    supported_mime_type = ["image/png", "image/jpeg"]
    image_extensions = [
        ".apng",
        ".avif",
        ".gif",
        ".jpg",
        ".jpeg",
        ".jfif",
        ".pjpeg",
        ".pjp",
        ".png",
    ]
class UserRoles:
    root_user = "root_user"
    super_user = "kl_super_admin"
    GLOBAL_CATALOG_USER_TYPE = "catalog_user"


class User:
    user_project_keys = [
        "project_id",
        "AccessLevel",
        "userrole",
        "access_group_ids",
        "landing_page",
        "is_app_user",
        "product_access",
        "app_url",
        "location",
        "department",
        "section",
        "access_level_list",
    ]
    user_download_job_name = "user_access_download"
    download_user_access_file_name = "User_Access_Report.xlsx"
import os


class UserRoles:
    root_user = "root_user"
    super_user = "kl_super_admin"
    GLOBAL_CATALOG_USER_TYPE = "catalog_user"
    required_keys = [
        each_key.strip()
        for each_key in os.environ.get(
            "USER_ROLE_KEYS",
            default="edit, create, view, publish, delete, "
            "clone, share,attachLicense, operator,"
            "auditLogs, screenshotRestriction, dashboard, title, auditLogsDownload, attachLicense, feedback, "
            "uploadLicense",
        ).split(",")
    ]
    user_id = "user_id"
    username = "username"
    email = "email"
    name = "name"
    userrole = "userrole"
    access_group = "access_group"
    access_group_ids = "access_group_ids"
    access_group_id = "access_group_id"
    access_list = "access_list"
    description = "description"
    height = "calc(100vh - 200px)"
    access_list_action = [
        {"label": "Edit", "type": "edit", "action": "edit"},
        {"label": "Delete", "type": "delete", "action": "delete"},
    ]
    remove_action = [
        {
            "label": "Remove",
            "type": "remove",
            "action": "remove",
            "icon-class": "fa fa-minus-circle",
        }
    ]
    add_existing_action = [
        {
            "label": "Add existing",
            "type": "addnew",
            "action": "addexisting",
            "custom_class": "btn-secondary",
        }
    ]
    user_download_option = [{"label": "", "type": "download", "action": "download"}]
    access_list_external_action = [
        {"label": "Create New", "type": "button", "action": "addnew", "icon_class": "fa fa-plus-circle"},
    ]
    access_list_header_content = [
        {"label": "Access Group", "key": "access_group"},
        {"label": "Description", "key": "description"},
    ]
    list_all_user_header = [
        {
            "label": "iLens ID",
            "key": "username",
            "tooltip": "status_info",
            "type": "icon",
            "value_position": "right",
            "iconClass": "status_icon",
            "enable_value_disp": True,
            "enableTooltip": True,
            "header_type": "text",
            "options": [],
            "enable_column_search": True,
        },
        {
            "label": "Full Name",
            "key": "full_name",
            "header_type": "text",
            "options": [],
            "enable_column_search": True,
        },
        {
            "label": "Email",
            "key": "e_mail",
            "header_type": "text",
            "options": [],
            "enable_column_search": True,
        },
        {
            "label": "Role",
            "key": "role",
            "header_type": "select",
            "options": [],
            "enable_column_search": True,
        },
        {
            "label": "Access Group",
            "key": "access_group",
            "header_type": "select",
            "options": [],
            "enable_column_search": True,
        },
        {
            "label": "Created On",
            "key": "created_on",
            "header_type": "text",
            "options": [],
            "enable_column_search": False,
        },
        {
            "label": "Last Login",
            "key": "last_logged_in",
            "header_type": "text",
            "options": [],
            "enable_column_search": False,
        },
    ]
    user_details_header = [
        {"key": "name", "label": "Name"},
        {"key": "username", "label": "User Name"},
        {"key": "email", "label": "Email"},
        {"key": "phonenumber", "label": "Phone Number"},
        {"key": "userrole", "label": "User Role"},
        {"key": "created_by", "label": "Created By"},
        {"label": "Department", "key": "department"},
        {"label": "Section", "key": "section"},
        {"label": "Location", "key": "location"},
    ]
    external_user_header = [
        {"key": "username", "label": "Existing User"},
        {"key": "userrole", "label": "User Role"},
        {"key": "created_by", "label": "Created By"},
        {"label": "Department", "key": "department"},
        {"label": "Section", "key": "section"},
        {"label": "Location", "key": "location"},
    ]
    user_role_name = "user_role_name"
    user_role_description = "user_role_description"
    user_role_id = "user_role_id"
    user_role_header = [
        {"headerName": "Role", "field": "role"},
        {"headerName": "Description", "field": "description"},
    ]
import re

from scripts.constants.db_constants import AggregationKeys


class AIModelAggregation:
    @staticmethod
    def ai_model_list_aggregation(project_id, filters=None):
        query = [
            {AggregationKeys.match: {"project_id": project_id}},
            {
                "$project": {
                    "_id": 0,
                    "project_id": "$project_id",
                    "label": "$label",
                    "description": "$description",
                    "ai_model_id": "$ai_model_id",
                }
            },
        ]
        if filters:
            match_filter = {"label": {AggregationKeys.regex: re.escape(filters.get("search", "")), "$options": "i"}}
            query[0][AggregationKeys.match].update(match_filter)
        return query
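# Sketch of how the aggregation above could be exercised with pymongo (pymongo
# is an assumption here; the service itself goes through ut_mongo_util). The
# search filter is merged into the initial $match, so project and label are
# filtered in a single stage. The URI below is a placeholder.
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")  # placeholder URI
pipeline = AIModelAggregation.ai_model_list_aggregation("project_139", filters={"search": "anomaly"})
for doc in client["global_catalog"]["ai_models"].aggregate(pipeline):
    print(doc["ai_model_id"], doc["label"])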
from scripts.constants.db_constants import AggregationKeys


class ConstantsAggregate:
    resources_thumbnail = [
        {AggregationKeys.match: {"type": "resources_thumbnail"}},
        {"$unwind": "$data"},
        {"$group": {"_id": None, "file_type": {"$push": {"k": "$data.file_type", "v": "$data.image_string"}}}},
        {"$project": {"_id": 0, "file_type": {"$arrayToObject": "$file_type"}}},
    ]

    @staticmethod
    def site_template_details(site_template_id):
        query = [
            {AggregationKeys.match: {"type": "site_template"}},
            {"$unwind": "$data"},
            {AggregationKeys.match: {"data.site_templt_id": site_template_id}},
            {"$project": {"_id": 0, "hierarchy_value": "$data.value", "hierarchy_key": "$data.key_list"}},
        ]
        return query
from scripts.constants.db_constants import AggregationKeys


class CustomAppsAggregate:
    @staticmethod
    def get_apps(project_id, counter, records, filters=None):
        skip = (counter - 1) * records
        query = [
            {
                AggregationKeys.lookup: {
                    "from": "category_apps",
                    "localField": "app_category_id",
                    "foreignField": "app_category_id",
                    "as": "app_category_details",
                }
            },
            {AggregationKeys.match: {"project_id": project_id}},
            {"$project": {"_id": 0}},
            {"$skip": skip},
            {"$limit": records},
        ]
        # Guard against the default filters=None before reading "search".
        if filters and filters.get("search"):
            search_query = {
                "$or": [
                    {"app_name": {AggregationKeys.regex: filters["search"], AggregationKeys.options: "i"}},
                    {
                        "description": {
                            AggregationKeys.regex: filters["search"],
                            AggregationKeys.options: "i",
                        }
                    },
                ]
            }
        else:
            search_query = {
                "$or": [
                    {"app_name": {AggregationKeys.regex: "", AggregationKeys.options: "i"}},
                    {"description": {AggregationKeys.regex: "", AggregationKeys.options: "i"}},
                ]
            }
        query.insert(6, {AggregationKeys.match: search_query})
        return query

    @staticmethod
    def get_all_apps(project_id):
        return [
            {
                AggregationKeys.lookup: {
                    "from": "category_apps",
                    "localField": "app_category_id",
                    "foreignField": "app_category_id",
                    "as": "app_category_details",
                }
            },
            {AggregationKeys.match: {"project_id": project_id}},
            {"$project": {"_id": 0}},
        ]

    @staticmethod
    def get_app_details(app_id, project_id):
        aggregate_qry = [
            {
                AggregationKeys.lookup: {
                    "from": "category_apps",
                    "localField": "app_category_id",
                    "foreignField": "app_category_id",
                    "as": "app_category_details",
                }
            },
            {AggregationKeys.match: {"app_id": app_id, "project_id": project_id}},
        ]
        return aggregate_qry
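# The skip/limit arithmetic used by get_apps above, shown standalone: page
# numbers (counter) are 1-based, so page 1 skips 0 records, page 2 skips one
# full page, and so on. Illustrative only.
def page_window(counter: int, records: int) -> tuple[int, int]:
    skip = (counter - 1) * records
    return skip, records


print(page_window(1, 10))  # -> (0, 10)
print(page_window(3, 25))  # -> (50, 25)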
from scripts.constants.db_constants import (
    AggregationKeys,
)


class CustomNodeAggregate:
    @staticmethod
    def fetch_custom_nodes(query_data, filters=None, filter_key=False):
        project_id = query_data.get("project_id")
        counter = int(query_data.get("counter")) if query_data.get("counter") else 1
        records = int(query_data.get("records")) if query_data.get("records") else 10
        skip = (counter - 1) * records
        query = [
            {AggregationKeys.match: {"project_id": project_id}},
            {"$group": {"_id": "$custom_node_id", "value": {"$sum": 1}, "name": {"$push": "$$ROOT"}}},
            {AggregationKeys.sort: {"value": -1}},
            {AggregationKeys.project: {"name": {"$arrayElemAt": [AggregationKeys.name, {"$subtract": ["$value", 1]}]}}},
            {"$replaceRoot": {"newRoot": AggregationKeys.name}},
            {"$skip": skip},
            {"$limit": records},
            {AggregationKeys.project: {"_id": 0}},
        ]
        if filter_key:
            search_query = {
                "$or": [{"node_name": {AggregationKeys.regex: filters["search"], AggregationKeys.options: "i"}}]
            }
            query.insert(6, {AggregationKeys.match: search_query})
            if filters.get("sort_by"):
                sort_value = 1 if filters["sort_by"] == "asc" else -1
                sort_query = {"node_name": sort_value}
                query.insert(7, {AggregationKeys.sort: sort_query})
        return query

    @staticmethod
    def fetch_custom_node_count(project_id, filters=None, filter_key=False):
        query = [
            {AggregationKeys.match: {"project_id": project_id}},
            {"$group": {"_id": "$custom_node_id", "value": {"$sum": 1}, "name": {"$push": "$$ROOT"}}},
            {AggregationKeys.sort: {"value": -1}},
            {AggregationKeys.project: {"name": {"$arrayElemAt": [AggregationKeys.name, {"$subtract": ["$value", 1]}]}}},
            {"$replaceRoot": {"newRoot": AggregationKeys.name}},
            {"$count": "count"},
        ]
        if filter_key:
            if filters.get("search"):
                search_query = {
                    "$or": [
                        {
                            "node_name": {
                                AggregationKeys.regex: filters["search"],
                                AggregationKeys.options: "i",
                            }
                        }
                    ]
                }
            else:
                search_query = {"$or": [{"node_name": {AggregationKeys.regex: "", AggregationKeys.options: "i"}}]}
            query.insert(6, {AggregationKeys.match: search_query})
        return query
from scripts.constants.db_constants import AggregationKeys
class UsersAggregate:
@staticmethod
def get_users_list(project_id=None):
query_json = [
{
"$project": {
"user_id": AggregationKeys.user_id,
"username": AggregationKeys.username,
"userName": {"$toLower": AggregationKeys.username},
}
},
{"$sort": {"userName": 1}},
{
AggregationKeys.group: {
"_id": None,
"data": {
AggregationKeys.push: {
"k": {AggregationKeys.ifnull: [AggregationKeys.user_id, ""]},
"v": {AggregationKeys.ifnull: [AggregationKeys.username, ""]},
}
},
}
},
{AggregationKeys.replace_root: {"newRoot": {AggregationKeys.arraytoobject: AggregationKeys.data}}},
]
if bool(project_id):
query_json.insert(0, {AggregationKeys.match: {"project_id": project_id}})
return query_json
@staticmethod
def get_users_value_pairs(project_id=None):
query_json = [
{
AggregationKeys.group: {
"_id": None,
"data": {
AggregationKeys.push: {
"v": {AggregationKeys.ifnull: [AggregationKeys.user_id, ""]},
"k": {AggregationKeys.ifnull: [AggregationKeys.username, ""]},
}
},
}
},
{AggregationKeys.replace_root: {"newRoot": {AggregationKeys.arraytoobject: AggregationKeys.data}}},
]
if bool(project_id):
query_json.insert(0, {AggregationKeys.match: {"project_id": project_id}})
return query_json
@staticmethod
def get_users_list_aggregate(project_id=None):
query_json = [
{
AggregationKeys.group: {
"_id": None,
"data": {
AggregationKeys.push: {
"k": {"$ifNull": ["$user_id", ""]},
"v": {"$ifNull": ["$username", ""]},
}
},
}
},
{AggregationKeys.replace_root: {"newRoot": {AggregationKeys.arraytoobject: AggregationKeys.data}}},
]
if bool(project_id):
query_json.insert(0, {AggregationKeys.match: {"project_id": project_id}})
return query_json
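# Usage sketch (illustrative, not part of the original module): each pipeline
# above pushes {k, v} pairs and finishes with $arrayToObject, so the
# aggregation yields a single document mapping user_id -> username:
#
#     pipeline = UsersAggregate.get_users_list(project_id="project_001")
#     result = list(user_collection.aggregate(pipeline))
#     # result shape (values hypothetical): [{"user_100": "alice", "user_101": "bob"}]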
from scripts.constants.db_constants import DBConstants
collection_job_list = DBConstants.collection_job_list
collection_report_templates = DBConstants.collection_report_templates
collection_user_list = DBConstants.collection_user
collection_reports = DBConstants.collection_reports
collection_constants = DBConstants.collection_constants
collection_user_role = DBConstants.user_role
from typing import List, Optional
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class AIModelSchema(BaseModel):
ai_model_id: str
label: Optional[str] = ""
description: Optional[str] = ""
img: Optional[List] = []
class AIModel(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.global_catalog, collection=CollectionNames.ai_models)
self.project_id = project_id
def find_all(self, project_id):
records = self.find({"project_id": project_id})
if not records:
return []
return list(records)
def get_data_by_aggregate(self, query_json: list):
return list(self.aggregate(query_json))
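# Usage sketch (illustrative, not part of the original module), assuming a
# configured `mongo_client`; ids below are hypothetical:
#
#     ai_models = AIModel(mongo_client, project_id="project_001")
#     records = ai_models.find_all(project_id="project_001")
#     validated = [AIModelSchema(**record).dict() for record in records]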
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DBConstants
class CategoryApps(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client, database=DBConstants.ilens_configuration, collection=CollectionNames.category_apps
)
self.project_id = project_id
def find_app_category_by_project(self, project_id, filter_dict=None):
query = {"project_id": project_id}
record = self.find(query=query, filter_dict=filter_dict)
return record or []
from typing import Any, Optional
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.constants.common_constants import CommonKeys
from scripts.constants.db_constants import DBConstants
from scripts.db.mongo.ilens_configuration.collections import collection_constants
class ConstantsSchema(BaseModel):
"""
This is the Schema for the Mongo DB Collection.
    All datastore and general responses will follow this schema.
"""
type: Optional[str] = ""
data: Optional[Any] = None
map_json: Optional[Any] = None
content_type: Optional[Any] = None
content: Optional[Any] = None
class Constants(CollectionBaseClass):
def __init__(self, mongo_client):
super().__init__(mongo_client, database=DBConstants.ilens_configuration, collection=collection_constants)
@property
def key_type(self):
return CommonKeys.KEY_TYPE
@property
def key_content_type(self):
return CommonKeys.KEY_CONTENT_TYPE
def find_constant_by_dict(self, _type):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param _type:
:return:
"""
record = self.find_one(query={self.key_type: _type})
        if not record:
            return {}
        return dict(record)
def find_constant(self, _type, filter_dict=None):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param _type:
:param filter_dict:
:return:
"""
query = {self.key_type: _type}
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return {}
return ConstantsSchema(**record).dict()
def find_constant_by_query(self, query, filter_dict=None):
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return {}
return ConstantsSchema(**record).dict()
def insert_one_constant(self, data):
"""
        The following function will insert one constant in the
        constants collection
:param self:
:param data:
:return:
"""
return self.insert_one(data)
def find_constant_by_content(self, content_type):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
"""
query = {self.key_content_type: content_type}
record = self.find_one(query=query)
if not record:
return {}
return record
def find_constant_by_aggregate(self, query):
constant = self.aggregate(query)
if not constant:
return []
return list(constant)
def find_constant_dict(self, _type, filter_dict=None):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param _type:
:param filter_dict:
:return:
"""
query = {self.key_type: _type}
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return {}
return dict(record)
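# Usage sketch (illustrative, not part of the original module): find_constant()
# normalises the raw Mongo document through ConstantsSchema, so callers always
# receive the same keys even for partial records. The constant type below is
# hypothetical:
#
#     constants = Constants(mongo_client)
#     units = constants.find_constant("units", filter_dict={"_id": 0})
#     # -> {"type": "units", "data": ..., "map_json": ..., "content_type": ..., "content": ...}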
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class CustomNodeSchema(BaseModel):
"""
This is the Schema for the Mongo DB Collection.
    All datastore and general responses will follow this schema.
"""
class CustomNode(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.custom_node
)
self.project_id = project_id
    def find_all_by_id(self, category_id):
        response = self.find(query={"category_id": category_id}, filter_dict={"_id": 0})
        return list(response) if response else []
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DBConstants
class CustomerApps(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client, database=DBConstants.ilens_configuration, collection=CollectionNames.customer_apps
)
self.project_id = project_id
def find_app_by_project(self, project_id, filter_dict=None, sort_starred=False):
query = {"project_id": project_id}
sort_query = [("starred", -1)] if sort_starred else None
record = self.find(query=query, filter_dict=filter_dict, sort=sort_query)
return record or []
from typing import Dict, Optional
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import (
CollectionNames,
CustomerProjectKeys,
DatabaseNames,
)
class CustomerProjectsSchema(BaseModel):
"""
This is the Schema for the Mongo DB Collection.
    All datastore and general responses will follow this schema.
"""
customer_project_name: Optional[str] = None
description: Optional[str] = None
site_templt_id: Optional[str] = None
logo_name: Optional[str] = None
logo_url: Optional[str] = None
process_templt_id: Optional[str] = None
update_details: Optional[Dict] = None
user_id: Optional[str] = None
customer_project_id: Optional[str] = None
product_encrypted: Optional[bool] = None
project_type: Optional[str] = None
class CustomerProjects(CollectionBaseClass):
def __init__(self, mongo_client):
super().__init__(
mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.customer_projects
)
@property
def key_customer_project_id(self):
return CustomerProjectKeys.KEY_CUSTOMER_PROJECT_ID
@property
def key_customer_project_name(self):
return CustomerProjectKeys.KEY_CUSTOMER_PROJECT_NAME
def find_project(self, project_id=None, project_name=None, filter_dict=None):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param project_name:
:param filter_dict:
:param project_id:
:return:
"""
query = {}
if project_id:
query.update({self.key_customer_project_id: project_id})
if project_name:
query.update({self.key_customer_project_name: project_name})
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return {}
return CustomerProjectsSchema(**record).dict()
def find_project_by_query(self, query, filter_dict=None):
record = self.find(query=query, filter_dict=filter_dict)
if record:
return record
return []
def fetch_project_details(self):
query = {}
filter_dict = {self.key_customer_project_id: 1, "_id": 0, self.key_customer_project_name: 1}
records = self.find(query=query, filter_dict=filter_dict)
if records:
            project_name_map = {}
            for record in records:
                project_name_map[record.get(self.key_customer_project_id)] = record.get(self.key_customer_project_name)
            return project_name_map
return {}
def insert_one_project(self, data):
"""
The following function will insert one project in the
        customer_projects collection
:param self:
:param data:
:return:
"""
return self.insert_one(data)
def update_one_project(self, project_id, data):
query = {self.key_customer_project_id: project_id}
return self.update_one(query=query, data=data)
def delete_one_project(self, project_id):
if project_id:
query = {self.key_customer_project_id: project_id}
return self.delete_one(query)
else:
return False
def get_project_data_by_aggregate(self, query: list):
return list(self.aggregate(pipelines=query))
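# Usage sketch (illustrative, not part of the original module); ids below are
# hypothetical:
#
#     projects = CustomerProjects(mongo_client)
#     name_by_id = projects.fetch_project_details()  # {"project_001": "Plant A", ...}
#     one = projects.find_project(project_id="project_001", filter_dict={"_id": 0})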
from typing import Dict, Optional
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import (
CollectionNames,
CustomerProjectKeys,
DatabaseNames,
)
class CustomerProjectsSchema(BaseModel):
"""
This is the Schema for the Mongo DB Collection.
    All datastore and general responses will follow this schema.
"""
customer_project_name: Optional[str] = None
description: Optional[str] = None
site_templt_id: Optional[str] = None
logo_name: Optional[str] = None
logo_url: Optional[str] = None
process_templt_id: Optional[str] = None
update_details: Optional[Dict] = None
user_id: Optional[str] = None
customer_project_id: Optional[str] = None
product_encrypted: Optional[bool] = None
project_type: Optional[str] = None
class CustomerSpaces(CollectionBaseClass):
def __init__(self, mongo_client):
super().__init__(
mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.customer_space
)
@property
def key_customer_project_id(self):
return CustomerProjectKeys.KEY_CUSTOMER_PROJECT_ID
@property
def key_customer_project_name(self):
return CustomerProjectKeys.KEY_CUSTOMER_PROJECT_NAME
def find_project(self, project_id=None, project_name=None, filter_dict=None):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param project_name:
:param filter_dict:
:param project_id:
:return:
"""
query = {}
if project_id:
query.update({self.key_customer_project_id: project_id})
if project_name:
query.update({self.key_customer_project_name: project_name})
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return {}
return CustomerProjectsSchema(**record).dict()
def find_project_by_query(self, query, filter_dict=None):
record = self.find(query=query, filter_dict=filter_dict)
if record:
return record
return []
def fetch_project_details(self):
query = {}
filter_dict = {self.key_customer_project_id: 1, "_id": 0, self.key_customer_project_name: 1}
records = self.find(query=query, filter_dict=filter_dict)
if records:
            project_name_map = {}
            for record in records:
                project_name_map[record.get(self.key_customer_project_id)] = record.get(self.key_customer_project_name)
            return project_name_map
return {}
def insert_one_project(self, data):
"""
        The following function will insert one project in the
        customer_space collection
:param self:
:param data:
:return:
"""
return self.insert_one(data)
def update_one_project(self, project_id, data):
query = {self.key_customer_project_id: project_id}
return self.update_one(query=query, data=data)
def delete_one_project(self, project_id):
if project_id:
query = {self.key_customer_project_id: project_id}
return self.delete_one(query)
else:
return False
def get_project_data_by_aggregate(self, query: list):
return list(self.aggregate(pipelines=query))
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class Header(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.header)
self.project_id = project_id
    def find_by_header_id(self, header_id):
        response = self.find_one(query={"header_id": header_id}, filter_dict={"_id": 0})
        return response or {}
def update_by_id(self, header_id, data):
_result = self.update_one(query={"header_id": header_id}, data=data)
if _result:
return True
return False
from typing import Optional
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class JobListSchema(BaseModel):
job_id: str
status: Optional[str] = ""
project_id: Optional[str] = ""
job_name: Optional[str] = ""
custom_node_id: Optional[str] = ""
start_time: Optional[float] = None
end_time: Optional[str] = ""
owner: Optional[str] = ""
job_type: Optional[str] = ""
action: Optional[str] = ""
remark: Optional[str] = ""
class JobLists(CollectionBaseClass):
def __init__(self, mongo_client):
super().__init__(mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.job_list)
@property
def key_job_id(self):
return "job_id"
from ut_mongo_util import CollectionBaseClass
from scripts.constants.common_constants import LookupKeys
from scripts.constants.db_constants import CollectionNames, DBConstants
from scripts.logging import logger
class Lookups(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client, database=DBConstants.ilens_configuration, collection=CollectionNames.lookup_table
)
self.project_id = project_id
@property
def key_lookup_id(self):
return LookupKeys.KEY_ID
@property
def key_name(self):
return LookupKeys.KEY_NAME
def find_all_lookups(self, **query):
"""
The following function will give all lookups for the given set of
search parameters as keyword arguments
:return:
"""
all_lookups = self.find(**query)
if not all_lookups:
return []
return list(all_lookups)
def find_by_id(self, lookup_id, project_id):
"""
The following function will give one lookup for a given set of
search parameters as keyword arguments
:return:
"""
one_lookup = self.find_one(query={self.key_lookup_id: lookup_id, "project_id": project_id})
if not one_lookup:
return {}
return one_lookup
def find_one_lookup(self, lookup_name, project_id, filter_dict=None):
query = {self.key_name: lookup_name, "project_id": project_id}
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return {}
return dict(record)
def find_one_lookup_name(self, lookup_name, filter_dict=None):
query = {self.key_name: lookup_name}
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return {}
return dict(record)
def find_by_param(self, **query):
"""
The following function will give one lookup for a given set of
search parameters as keyword arguments
:return:
"""
one_lookup = self.find(query)
if one_lookup:
return list(one_lookup)
return []
def update_one_lookup(self, lookup_name, lookup_id, project_id, data):
"""
        The following function will update one lookup in the
        lookup_table collection based on the given query
"""
query_dict = {self.key_name: lookup_name, self.key_lookup_id: lookup_id, "project_id": project_id}
return self.update_one(data=data, query=query_dict)
def insert_one_lookup(self, data):
return self.insert_one(data)
def delete_one_lookup(self, lookup_id):
"""
        The following function will delete one lookup in the
        lookup_table collection based on the given query
"""
if lookup_id:
return self.delete_one(query={self.key_lookup_id: lookup_id})
else:
return False
def find_by_aggregate(self, query):
record = self.aggregate(query)
if record:
return list(record)
else:
return []
def map_lookup_keys(self, lookup_name, project_id):
query = {self.key_name: lookup_name, "project_id": project_id}
_record = self.find_one(query=query)
if not _record:
return {}
return {record["lookupdata_id"]: record["lookup_value"] for record in _record["lookup_data"]}
def find_one_and_update(self, query, data, upsert=True):
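        # NOTE (added comment): despite its name, this wraps a plain update_one
        # (with optional upsert) and returns modified_count, not the document.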
try:
database_name = self.database
collection_name = self.collection
db = self.client[database_name]
collection = db[collection_name]
response = collection.update_one(query, data, upsert=upsert)
return response.modified_count
except Exception as e:
logger.exception(e)
raise e
def find_one_lookup_as_label_value(self, lookup_name, project_id, filter_dict=None):
query = {self.key_name: lookup_name, "project_id": project_id}
record = self.find_one(query=query, filter_dict=filter_dict)
label_value_list = []
if record:
for each_lookup in record.get("lookup_data", []):
label_value_list.append(
{"label": each_lookup.get("lookup_value"), "value": each_lookup.get("lookupdata_id")}
)
return label_value_list
def get_lookup_property_values(self, lookup_name, project_id, property, lookup_id_list, filter_dict=None):
create_property_dict = {}
query = {self.key_name: lookup_name, "project_id": project_id}
record = self.find_one(query=query, filter_dict=filter_dict)
if record:
for each_lookup in record.get("lookup_data", []):
if lookup_id_list and each_lookup["lookupdata_id"] not in lookup_id_list:
continue
for each_property in each_lookup["properties"]:
if each_property["key"] == property:
create_property_dict[each_lookup["lookupdata_id"]] = each_property["value"]
break
return create_property_dict
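# Usage sketch (illustrative, not part of the original module): map_lookup_keys()
# flattens the embedded lookup_data array into an id -> value mapping, while
# find_one_lookup_as_label_value() reshapes the same array for UI dropdowns.
# The lookup name and ids below are hypothetical:
#
#     lookups = Lookups(mongo_client)
#     id_to_value = lookups.map_lookup_keys("machine_states", "project_001")
#     options = lookups.find_one_lookup_as_label_value("machine_states", "project_001")
#     # options -> [{"label": "Running", "value": "lookupdata_001"}, ...]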
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DBConstants
class Materials(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DBConstants.ilens_configuration, collection=CollectionNames.materials)
self.project_id = project_id
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class PipelineCategory(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.pipeline_category
)
self.project_id = project_id
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class PipelineInfoSchema(BaseModel):
"""
This is the Schema for the Mongo DB Collection.
    All datastore and general responses will follow this schema.
"""
class PipelineInfo(CollectionBaseClass):
def __init__(self, mongo_client, project_id):
super().__init__(
mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.pipeline_info
)
self.project_id = project_id
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class PipelineInstance(CollectionBaseClass):
def __init__(self, mongo_client, project_id):
super().__init__(
mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.pipeline_instance
)
self.project_id = project_id
from typing import Optional
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.constants.common_constants import PrivateSpaceKeys
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class PrivateCatalogSpace(BaseModel):
catalog_space_id: Optional[str] = ""
catalog_space_name: str
associated_projects: Optional[list] = []
space_description: Optional[str] = ""
created_at: Optional[int] = 0
created_by: Optional[str] = ""
updated_at: Optional[int] = 0
updated_by: Optional[str] = ""
is_private: bool = True
approvers: Optional[list] = []
class PrivateCatalogSpaceConn(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.private_catalog_space
)
self.project_id = project_id
@property
def key_space_id(self):
return PrivateSpaceKeys.KEY_CATALOG_SPACE_ID
@property
def key_space_name(self):
return PrivateSpaceKeys.KEY_CATALOG_SPACE_NAME
def find_name_by_id(self, tag_id: str):
query = {self.key_space_id: tag_id}
filter_dict = {self.key_space_name: 1, "_id": 0}
record = self.find_one(query, filter_dict)
if not record:
return None
        return record[self.key_space_name]
def find_tags(self, query):
all_spaces = self.find(query=query)
if all_spaces:
return list(all_spaces)
return []
def fetch_one_private_space(self, filter_dict=None, **query):
one_step = self.find_one(filter_dict=filter_dict, query=query)
        return PrivateCatalogSpace(**one_step) if one_step else {}
def update_one_space(self, data, upsert=False, **query):
"""
        The following function will update one private space in the
        private_catalog_space collection based on the given query
:param data:
:param upsert:
:param query:
:return:
"""
return self.update_one(data=data, upsert=upsert, query=query)
def get_data_by_aggregate(self, query_json: list):
return list(self.aggregate(query_json))
def soft_delete_space(self, catalog_space_id):
json_update = {"is_deleted": True}
query = {self.key_space_id: catalog_space_id}
return self.update_many(data=json_update, query=query)
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DBConstants
class RuleDefinition(CollectionBaseClass):
def __init__(self, mongo_client):
super().__init__(
mongo_client, database=DBConstants.ilens_configuration, collection=CollectionNames.rule_definition
)
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DBConstants
class RuleEngine(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DBConstants.ilens_configuration, collection=CollectionNames.rule_engine)
self.project_id = project_id
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DBConstants
class TagCategory(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client, database=DBConstants.ilens_configuration, collection=CollectionNames.tag_category
)
self.project_id = project_id
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DBConstants
class TagGroups(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DBConstants.ilens_configuration, collection=CollectionNames.tag_groups)
self.project_id = project_id
from ut_mongo_util import CollectionBaseClass
from scripts.constants.common_constants import TagKeys
from scripts.constants.db_constants import CollectionNames, DBConstants
class Tags(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DBConstants.ilens_configuration, collection=CollectionNames.tags)
self.project_id = project_id
@property
def key_tag_id(self):
return TagKeys.KEY_TAG_ID
@property
def key_tag_name(self):
return TagKeys.KEY_TAG_NAME
def find_name_by_id(self, tag_id: str):
query = {self.key_tag_id: tag_id}
filter_dict = {self.key_tag_name: 1, "_id": 0}
record = self.find_one(query, filter_dict)
if not record:
return None
return record[self.key_tag_name]
def find_all_tags(self, sort=None, skip=0, limit=None, **query):
"""
The following function will give all tags for the given set of
search parameters as keyword arguments
:param sort:
:param skip:
:param limit:
:param query:
:return:
"""
filter_dict = {"id": 1, "tag_name": 1, "tag_category_name": 1, "tag_category_id": 1, "description": 1, "_id": 0}
response = self.find(filter_dict=filter_dict, sort=sort, skip=skip, limit=limit, query=query)
if not response:
return []
return list(response)
def find_tags(self, query):
all_tags = self.find(query=query)
if all_tags:
return list(all_tags)
return []
def find_tags_by_aggregate(self, query):
tags = self.aggregate(query)
if not tags:
return []
return list(tags)
def insert_one_tag(self, data):
"""
The following function will insert one tag in the
        tags collection
:param data:
:return:
"""
return self.insert_one(data)
def insert_many_tags(self, data):
"""
The following function will insert many tags in the
tags collection
:param data:
:return:
"""
return self.insert_many(data)
from typing import Any, Union
from ut_mongo_util import CollectionBaseClass
from scripts.constants.common_constants import CommonKeys
from scripts.constants.db_constants import CollectionNames, DBConstants
class TempParameters(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client,
database=DBConstants.ilens_configuration,
collection=CollectionNames.temp_parameters,
)
self.project_id = project_id
@property
def key_project_id(self):
return CommonKeys.KEY_PROJECT_ID
@property
def key_user_id(self):
return CommonKeys.KEY_USER_ID
@property
def key_id(self):
return CommonKeys.KEY_ID
def find_by_project(
self, project_id: str, projections=None, sort=None, query_dict=None, limit=None, skip=0, **filters
) -> Union[Any, None]:
query = {self.key_project_id: project_id}
if query_dict:
query |= query_dict
if filters:
query.update(filters)
records = self.find(query, projections, sort=sort, limit=limit, skip=skip)
return list(records) if records else None
def insert_many_parameters(self, data):
"""
        The following function will insert multiple parameter records for temporary use
:param self:
:param data:
:return:
"""
return self.insert_many(data)
def delete_many_parameters(self, project_id=None, user_id=None, delete_key=None):
"""
        The following function will delete multiple parameter records
        based on project_id and user_id
:param self:
:param project_id:
:param user_id:
:param delete_key:
:return:
"""
query = {}
if project_id:
query[self.key_project_id] = project_id
if user_id:
query[self.key_user_id] = user_id
if delete_key:
query[delete_key] = True
return self.delete_many(query)
def delete_one_parameter(self, project_id, user_id, tag_id=None, temp_id=None):
"""
The following function will delete one parameter data based on query
"""
query = {self.key_project_id: project_id, self.key_user_id: user_id}
if tag_id:
query[self.key_id] = tag_id
if temp_id:
query["temp_id"] = temp_id
return self.delete_one(query)
def find_by_aggregate(self, query):
return list(record) if (record := self.aggregate(query)) else []
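# Usage sketch (illustrative, not part of the original module): find_by_project()
# merges three query sources: the mandatory project filter, an optional
# query_dict (merged via the |= dict-union operator, Python 3.9+), and any
# extra keyword filters. Names below are hypothetical:
#
#     temp_params = TempParameters(mongo_client)
#     rows = temp_params.find_by_project(
#         "project_001",
#         projections={"_id": 0},
#         query_dict={"temp_id": "tmp_1"},
#         user_id="user_100",
#     )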
import re
from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import DBConstants
from scripts.constants.db_constants import User as UserConstants
from scripts.db.mongo.ilens_configuration.collections import collection_user_list
from scripts.db.mongo.ilens_configuration.collections.user_space import UserSpace
class UserCollectionKeys:
KEY_LANGUAGE = "language"
KEY_NAME = "name"
KEY_USER_ID = "user_id"
KEY_SPACE_ID = "space_id"
KEY_USERNAME = "username"
KEY_USER_ROLE = "userrole"
KEY_EMAIL = "email"
class UserSchema(BaseModel):
name: Optional[str] = ""
space_id: Optional[str] = ""
username: Optional[str] = ""
password: Optional[str] = ""
email: Optional[Any] = None
phonenumber: Optional[Any] = None
userrole: Optional[List[str]] = None
user_type: Optional[str] = ""
user_id: Optional[str] = ""
client_id: Optional[str] = ""
created_by: Optional[str] = ""
encryption_salt: Optional[Dict] = {}
product_encrypted: Optional[bool] = False
language: Optional[str] = ""
passwordReset: Optional[Dict] = {}
failed_attempts: Optional[int] = 0
is_user_locked: Optional[bool] = False
last_failed_attempt: Optional[str] = ""
location: Optional[str] = ""
azure_id: Optional[str] = ""
expires_on: Optional[str] = ""
token_azure: Optional[str] = ""
disable_user: Optional[bool] = False
created_on: Optional[int] = 0
updated_by: Optional[str] = ""
updated_on: Optional[int] = 0
mfa_enabled: Optional[bool] = False
mfa_configured: Optional[bool] = False
secret: Optional[str] = ""
mfa_enabled_on: Optional[int] = 0
password_added_on: Optional[int] = 0
default_project: Optional[str] = ""
class User(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DBConstants.ilens_configuration, collection=collection_user_list)
self.user_space_mongo = UserSpace()
self.project_id = project_id
self.key_user_id = UserCollectionKeys.KEY_USER_ID
self.key_space_id = UserCollectionKeys.KEY_SPACE_ID
self.key_username = UserCollectionKeys.KEY_USERNAME
self.key_email = UserCollectionKeys.KEY_EMAIL
self.project_specific_keys = UserConstants.user_project_keys
def find_user(self, space_id, user_id=None, username=None, email=None, filter_dict=None):
query = {}
if user_id:
query[self.key_user_id] = user_id
if username:
query[self.key_username] = username
if email:
            query[self.key_email] = re.compile(email, re.IGNORECASE)
user = self.find_decrypted(query=query, filter_dict=filter_dict)
if user:
if space_id != user["space_id"]:
user_space_record = self.user_space_mongo.fetch_user_project(
user_id=user.get(self.key_user_id), space_id=space_id
)
if user_space_record:
for item in self.project_specific_keys:
user[item] = user_space_record.get(item, None)
return UserSchema(**user)
return user
@staticmethod
def get_users_list(project_id=None):
query_json = [
{
"$group": {
"_id": None,
"data": {"$push": {"k": {"$ifNull": ["$user_id", ""]}, "v": {"$ifNull": ["$username", ""]}}},
}
},
{"$replaceRoot": {"newRoot": {"$arrayToObject": "$data"}}},
]
if bool(project_id):
query_json.insert(0, {"$match": {"project_id": project_id}})
return query_json
def users_list_by_aggregate(self, query: list):
return self.aggregate(pipelines=query)
def find_user_by_project_id(self, user_id, project_id):
        user = self.find_one(query={self.key_user_id: user_id, "project_id": project_id})
if user:
return dict(user)
return user
def get_all_users(self, filter_dict=None, sort=None, skip=0, limit=None, **query):
users = self.find(filter_dict=filter_dict, sort=sort, skip=skip, limit=limit, query=query)
if users:
return list(users)
return []
def find_user_role_for_user_id(self, user_id, project_id):
query = {"user_id": user_id, "project_id": project_id}
filter_dict = {"userrole": 1, "_id": 0}
return self.find_one(query=query, filter_dict=filter_dict)
def find_base_user(self, user_id=None, username=None, email=None, filter_dict=None):
query = {}
if user_id:
query[self.key_user_id] = user_id
if username:
query[self.key_username] = username
if email:
query[self.key_email] = re.compile(email, re.IGNORECASE)
if not (user := self.find_decrypted(query=query, filter_dict=filter_dict)):
return user
try:
return UserSchema(**user)
except Exception:
return user
def find_by_space(
self,
projections=None,
sort=None,
query_dict=None,
limit=None,
skip=0,
**filters,
) -> Union[Any, None]:
query = {}
if query_dict:
query |= query_dict
if filters:
query.update(filters)
records = self.find(query, projections, sort=sort, limit=limit, skip=skip)
if records:
records = self.get_decrypted_records(records)
return list(records) if records else []
def delete_one_user(self, user_id, space_id):
return self.delete_one(query={self.key_user_id: user_id, self.key_space_id: space_id})
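# Usage sketch (illustrative, not part of the original module): find_user()
# resolves the base user record and, when the requested space differs from the
# user's home space, overlays the space-specific keys from user_space before
# validating through UserSchema. Ids below are hypothetical:
#
#     users = User(mongo_client)
#     user = users.find_user(space_id="space_001", username="alice", filter_dict={"_id": 0})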
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import DatabaseNames, DBConstants
class UserInfoHistory(CollectionBaseClass):
def __init__(self, mongo_client):
super().__init__(
mongo_client,
database=DatabaseNames.ilens_configuration,
collection=DBConstants.user_info_history,
)
def insert_user_info_history(self, data):
return self.insert_one(data)
from ut_mongo_util import CollectionBaseClass, mongo_client
from scripts.constants.db_constants import CollectionNames, DBConstants
collection_name = CollectionNames.user_project
class UserCollectionKeys:
KEY_LANGUAGE = "language"
KEY_NAME = "name"
KEY_USER_ID = "user_id"
KEY_PROJECT_ID = "project_id"
KEY_USERNAME = "username"
KEY_USER_ROLE = "userrole"
class UserProject(CollectionBaseClass):
key_username = UserCollectionKeys.KEY_USERNAME
key_user_id = UserCollectionKeys.KEY_USER_ID
key_language = UserCollectionKeys.KEY_LANGUAGE
key_name = UserCollectionKeys.KEY_NAME
key_project_id = UserCollectionKeys.KEY_PROJECT_ID
def __init__(self, project_id=None):
super().__init__(
mongo_client,
database=DBConstants.ilens_configuration,
collection=collection_name,
)
def fetch_user_project(self, user_id, project_id):
query = {self.key_user_id: user_id, self.key_project_id: project_id}
user = self.find_one(query=query)
return user
def fetch_user_project_with_details(self, user_id, project_id):
query = [
{"$match": {"user_id": user_id, "project_id": project_id}},
{"$lookup": {"from": "user", "localField": "user_id", "foreignField": "user_id", "as": "user_details"}},
{"$unwind": {"path": "$user_details"}},
{
"$project": {
"project_id": 1,
"AccessLevel": 1,
"access_group_ids": 1,
"userrole": 1,
"user_id": 1,
"name": "$user_details.name",
"email": "$user_details.email",
"username": "$user_details.username",
}
},
]
user = self.aggregate(query)
user_list = list(user)
if user_list:
return user_list[0]
else:
return None
def find_user_role_for_user_id(self, user_id, project_id):
query = {"user_id": user_id, "project_id": project_id}
filter_dict = {"userrole": 1, "_id": 0}
return self.find_one(query=query, filter_dict=filter_dict)
from typing import Optional
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import DBConstants
from scripts.db.mongo.ilens_configuration.collections import collection_user_role
class UserCollectionKeys:
KEY_LANGUAGE = "language"
KEY_NAME = "name"
KEY_USER_ID = "user_id"
KEY_PROJECT_ID = "project_id"
KEY_USERNAME = "username"
KEY_USER_ROLE = "user_role_name"
KEY_EMAIL = "email"
class UserRole(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DBConstants.ilens_configuration, collection=collection_user_role)
self.project_id = project_id
self.key_user_id = UserCollectionKeys.KEY_USER_ID
self.key_project_id = UserCollectionKeys.KEY_PROJECT_ID
def find_user(self, user_id):
user = self.find_one(query={"user_id": user_id})
if user:
return dict(user)
return user
def find_user_name(self, user_id, project_id: Optional[str]):
query = {"user_role_id": user_id, "project_id": project_id}
one_user = self.find_one(filter_dict={"user_role_name": 1, "_id": 0}, query=query)
if one_user is None:
return one_user
return one_user["user_role_name"]
@staticmethod
def get_users_list(project_id=None):
query_json = [
{
"$group": {
"_id": None,
"data": {"$push": {"k": {"$ifNull": ["$user_id", ""]}, "v": {"$ifNull": ["$username", ""]}}},
}
},
{"$replaceRoot": {"newRoot": {"$arrayToObject": "$data"}}},
]
if bool(project_id):
query_json.insert(0, {"$match": {"project_id": project_id}})
return query_json
def users_list_by_aggregate(self, query: list):
return self.aggregate(pipelines=query)
def find_user_by_project_id(self, user_id, project_id):
user = self.find_one(query={self.key_user_id: user_id, self.key_project_id: project_id})
if user:
return dict(user)
return user
def find_user_role_by_id(self, user_role_id, filter_dict=None):
return self.find_one(query={"user_role_id": user_role_id}, filter_dict=filter_dict)
from ut_mongo_util import CollectionBaseClass, mongo_client
from scripts.constants.db_constants import CollectionNames, DBConstants
collection_name = CollectionNames.user_space
class UserCollectionKeys:
KEY_LANGUAGE = "language"
KEY_NAME = "name"
KEY_USER_ID = "user_id"
KEY_SPACE_ID = "space_id"
KEY_USERNAME = "username"
KEY_USER_ROLE = "userrole"
class UserSpace(CollectionBaseClass):
key_username = UserCollectionKeys.KEY_USERNAME
key_user_id = UserCollectionKeys.KEY_USER_ID
key_language = UserCollectionKeys.KEY_LANGUAGE
key_name = UserCollectionKeys.KEY_NAME
key_space_id = UserCollectionKeys.KEY_SPACE_ID
def __init__(self):
super().__init__(
mongo_client,
database=DBConstants.ilens_configuration,
collection=collection_name,
)
def fetch_user_space(self, user_id, space_id):
query = {self.key_user_id: user_id, self.key_space_id: space_id}
user = self.find_one(query=query)
return user
def fetch_user_space_with_details(self, user_id, space_id):
query = [
{"$match": {"user_id": user_id, "space_id": space_id}},
{"$lookup": {"from": "user", "localField": "user_id", "foreignField": "user_id", "as": "user_details"}},
{"$unwind": {"path": "$user_details"}},
{
"$project": {
"project_id": 1,
"AccessLevel": 1,
"access_group_ids": 1,
"userrole": 1,
"user_id": 1,
"name": "$user_details.name",
"email": "$user_details.email",
"username": "$user_details.username",
}
},
]
user = self.aggregate(query)
user_list = list(user)
if user_list:
return user_list[0]
else:
return None
def find_user_role_for_user_id(self, user_id, space_id):
query = {"user_id": user_id, "space_id": space_id}
filter_dict = {"userrole": 1, "_id": 0}
return self.find_one(query=query, filter_dict=filter_dict)
def update_one_user_space(self, data, user_id, space_id):
query = {self.key_user_id: user_id, "space_id": space_id}
return self.update_one(query=query, data=data)
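# Usage sketch (illustrative, not part of the original module): the $lookup in
# fetch_user_space_with_details() joins a user_space row with the base user
# collection, so one call returns role/access fields alongside name, email and
# username. Ids below are hypothetical:
#
#     user_space = UserSpace()
#     details = user_space.fetch_user_space_with_details("user_100", "space_001")
#     # -> {"user_id": ..., "userrole": [...], "name": ..., "email": ...} or None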
from typing import Optional
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class CategorySchema(BaseModel):
project_id: Optional[str] = ""
categoryName: Optional[str] = ""
color: Optional[str] = ""
dashboard_category_id: Optional[str] = ""
class Category(CollectionBaseClass):
key_project_id = "project_id"
key_dashboard_category_id = "dashboard_category_id"
key_category_name = "categoryName"
key_category = "category"
key_color = "color"
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client,
database=DatabaseNames.ilens_widget,
collection=CollectionNames.category,
)
self.project_id = project_id
def find_category(self, category_id, project_id=None):
query = {self.key_dashboard_category_id: category_id}
if project_id:
query.update({"project_id": project_id})
filter_dict = {"_id": 0}
return self.find_one(query=query, filter_dict=filter_dict)
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class Dashboard(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client,
database=DatabaseNames.ilens_widget,
collection=CollectionNames.collection_dashboard,
)
self.project_id = project_id
def find_dashboard(self, dashboard_id, project_id=None):
query = {"dashboard_id": dashboard_id}
if project_id:
query.update({"project_id": project_id})
filter_dict = {"_id": 0}
return self.find_one(query=query, filter_dict=filter_dict)
def update_dashboard(self, dashboard_id, data):
query = {"dashboard_id": dashboard_id}
return self.update_one(query=query, data=data, upsert=True)
def find_dashboard_by_ids(self, dashboard_ids, project_id=None):
query = {"dashboard_id": {"$in": dashboard_ids}}
if project_id:
query.update({"project_id": project_id})
filter_dict = {"_id": 0}
return self.find(query=query, filter_dict=filter_dict)
from ut_mongo_util import CollectionBaseClass
from scripts.constants.db_constants import CollectionNames, DatabaseNames
class Widget(CollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(
mongo_client,
database=DatabaseNames.ilens_widget,
collection=CollectionNames.collection_widget,
)
self.project_id = project_id
def find_widgets_by_dashboard(self, dashboard_id):
query = {"dashboard_id": dashboard_id}
return self.find(query=query)
def insert_many_widgets(self, data):
return self.insert_many(data)
def find_widgets_by_dashboard_ids(self, dashboard_ids):
query = {"dashboard_id": {"$in": dashboard_ids}}
filter_dict = {"_id": 0}
return self.find(query=query, filter_dict=filter_dict)
from ut_mongo_util import RedisConfig
from ut_redis_connector import RedisConnector
from scripts.config import RedisConf
connector = RedisConnector(RedisConfig.REDIS_URI)
project_db = connector.connect(db=int(RedisConfig.REDIS_PROJECT_DB), decode_responses=True)
redis_license_check_db = connector.connect(db=int(RedisConf.REDIS_LICENSE_CHECK_DB))
class ILensErrors(Exception):
    """
    Base Error Class
    """

    def __init__(self, msg):
        Exception.__init__(self, msg)
class ErrorMessages:
UNKNOWN = "Unknown Error occurred"
ERR001 = "Configurations not available, please verify the database."
ERR002 = "Data Not Found"
ERR003 = "User Record Not Found"
LOOKUPSERROR = "Could not find scadas configured in lookups for this app"
    SELECTEDDETAILSERROR = "Could not find all selected details"
INCORRECTDETAILS = "Incorrect details"
UNABLETOCONNECT = "Unable to connect with global catalog module"
RULEDEFERROR = "Unable to get rule def details from global catalog module"
CONFLICTSERROR = "Unable to detect conflicts"
    GLOBALCATALOGDETAILSERROR = "Unable to get required details from global catalog module"
    CONNECTIONERROR = "Unable to connect to global catalog to fetch material details {e}"
    WORKFLOWERROR = "Workflow does not exist"
USERMAPPINGERROR = "Failed to create user mapping"
VERSIONERROR = "Failed to fetch available version list"
PIPELINE_DETAILS_ERROR = "Unable to find pipeline details"
ERROR001 = "Authentication Failed. Please verify token"
ERROR002 = "Signature Expired"
ERROR003 = "Signature Not Valid"
ERROR004 = "User Record Not Found"
class JobCreationError(Exception):
"""
Raised when a Job Creation throws an exception.
Job Creation happens by adding a record to Mongo.
"""
class UnknownError(Exception):
pass
class DuplicateSpaceNameError(Exception):
pass
class KairosDBError(Exception):
pass
class UnauthorizedError(Exception):
pass
class ImageValidation(Exception):
pass
class ILensError(Exception):
pass
class NameExists(Exception):
pass
class InputRequestError(ILensError):
pass
class IllegalTimeSelectionError(ILensError):
pass
class DataNotFound(Exception):
pass
class AuthenticationError(ILensError):
"""
JWT Authentication Error
"""
class JWTDecodingError(Exception):
pass
class DuplicateReportNameError(Exception):
pass
class PathNotExistsException(Exception):
pass
class ImplementationError(Exception):
pass
class UserRoleNotFoundException(Exception):
pass
class CustomError(Exception):
pass
class IllegalToken(ILensErrors):
pass
class InvalidPasswordError(ILensErrors):
pass
class UserNotFound(ILensErrors):
pass
class TooManyRequestsError(Exception):
pass
class FixedDelayError(ILensErrors):
pass
class VariableDelayError(ILensErrors):
pass
class LicenceValidationError(Exception):
pass
class CustomAppError:
FAILED_TO_SAVE = "Failed to save app"
class GlobalCatalogError(Exception):
    """
    Base Error Class
    Generic GlobalCatalog Error
    """

    def __init__(self, msg):
        Exception.__init__(self, msg)
class UserAccessExceptions:
UAE = "User doesn't have the access"
class UserExceptions:
UID_UN_NOT_EMPTY = "Both user_id and user_name cannot be empty"
FAILED_TO_SAVE = "Failed to save the user details"
USER_INACTIVE = "User Inactive, please contact administrator"
INCORRECT_USERID = "Failed. Incorrect user id or user id doesn’t exist. Please try again."
    USER_NOT_EXIST = "User does not exist"
KEYCLOAK_CONNECTION_ERROR = "Failed to connect to keycloak"
KEYCLOAK_USER_CREATION_ERROR = "Failed to create user in keycloak"
class DefaultExceptionsCode:
DE001 = "Failed to login! Unauthorised!"
DE002 = "Failed to login! Invalid authentication code"
DE003 = "Your password has expired. Please click 'Forgot Password' to reset it and continue using your account."
DEIP = "Failed to login! Unauthorised!"
DEIL = "Failed to login! Please contact administrator"
DE004 = "Too many failed attempts! Please try again after 10 seconds"
DE006 = "Please wait a moment before attempting to login again."
class EmailMessage:
EME001 = "Secret key to reset the password has been sent to registered email address."
class CaptchaError:
VALIDATION_FAILED = "Captcha Validation failed"
import logging
import os
import pathlib
from logging import StreamHandler
from logging.handlers import RotatingFileHandler, SocketHandler
from scripts.config import Service
def read_configuration():
return {
"name": "global-catalog",
"handlers": [
{
"type": "RotatingFileHandler",
"max_bytes": 100000000,
"back_up_count": 5,
"enable": False if os.environ.get("ENABLE_FILE_LOG", False) in [False, "False", "false"] else True,
},
{
"type": "StreamHandler",
"enable": False if os.environ.get("ENABLE_CONSOLE_LOG", True) in [False, "False", "false"] else True,
},
],
}
logging_config = read_configuration()
def get_logger():
"""
Creates a rotating log
"""
__logger__ = logging.getLogger("UT")
    _level = (Service.LOG_LEVEL or "INFO").upper()
__logger__.setLevel(_level)
log_formatter = "%(asctime)s - %(levelname)-6s - [%(threadName)5s:%(funcName)5s(): %(lineno)s] - %(message)s"
time_format = "%Y-%m-%d %H:%M:%S"
formatter = logging.Formatter(log_formatter, time_format)
for each_handler in logging_config["handlers"]:
if each_handler["type"] in ["RotatingFileHandler"] and each_handler.get("enable", False):
pathlib.Path("logs").mkdir(parents=True, exist_ok=True)
log_file = pathlib.Path("logs", f"{Service.MODULE_NAME}.log")
__logger__.debug("File Logger Enabled")
temp_handler = RotatingFileHandler(
log_file, maxBytes=each_handler["max_bytes"], backupCount=each_handler["back_up_count"]
)
temp_handler.setFormatter(formatter)
elif each_handler["type"] in ["StreamHandler"] and each_handler.get("enable", True):
__logger__.debug("Console Logger Enabled")
temp_handler = StreamHandler()
temp_handler.setFormatter(formatter)
elif each_handler["type"] in ["SocketHandler"]:
temp_handler = SocketHandler(each_handler["host"], each_handler["port"])
else:
continue
__logger__.addHandler(temp_handler)
return __logger__
logger = get_logger()
logger:
name: catalog_management
level: DEBUG
handlers:
- type: RotatingFileHandler
file_path: logs/
max_bytes: 100000000
back_up_count: 5
- type: StreamHandler
name: catalog_management
from fastapi import APIRouter
router = APIRouter()
import json
from functools import lru_cache
@lru_cache()
def get_db_name(redis_client, project_id: str, database: str, delimiter="__"):
if not project_id:
return database
val = redis_client.get(project_id)
if val is None:
raise ValueError(f"Unknown Project, Project ID: {project_id} Not Found!!!")
val = json.loads(val)
if not val:
return database
# Get the prefix flag to apply project_id prefix to any db
prefix_condition = bool(val.get("source_meta", {}).get("add_prefix_to_database"))
if prefix_condition:
# Get the prefix name from mongo or default to project_id
prefix_name = val.get("source_meta", {}).get("prefix") or project_id
return f"{prefix_name}{delimiter}{database}"
return database
@lru_cache()
def get_redis_db_prefix(redis_client, project_id: str, delimiter="__"):
if not project_id:
return False
val = redis_client.get(project_id)
if val is None:
return False
val = json.loads(val)
if not val:
return False
# Get the prefix flag to apply project_id prefix to any db
prefix_condition = bool(val.get("source_meta", {}).get("add_prefix_to_database"))
if prefix_condition:
# Get the prefix name from mongo or default to project_id
prefix_name = val.get("source_meta", {}).get("prefix") or project_id
return f"{prefix_name}{delimiter}"
return False
def get_project_data_from_redis(redis_client, project_id: str):
record = redis_client.get(project_id)
if record is None:
raise ValueError(f"Unknown Project, Project ID: {project_id} Not Found!!!")
if record := json.loads(record):
return record
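# Usage sketch (illustrative, not part of the original module): get_db_name()
# prefixes the database name only when the project's cached metadata asks for
# it. With a hypothetical redis value under "project_001" of
# {"source_meta": {"add_prefix_to_database": true, "prefix": "acme"}}:
#
#     get_db_name(project_db, "project_001", "ilens_configuration")
#     # -> "acme__ilens_configuration"; without the flag -> "ilens_configuration"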
import json
import paho.mqtt.client as mqtt
from scripts.config import MQTTConf
from scripts.logging import logger
def on_connect(client, userdata, flags, rc):
logger.debug(f"Publisher Connected with result code {str(rc)}")
def push_notification(notification, user_id):
    try:
        client = mqtt.Client()
        client.username_pw_set(MQTTConf.MQTT_USERNAME, MQTTConf.MQTT_PASSWORD)
        client.on_connect = on_connect
        client.connect(MQTTConf.MQTT_HOST, MQTTConf.MQTT_PORT, 30)
        # Run the network loop in a background thread so the QoS 1 publish is
        # actually flushed before disconnecting.
        client.loop_start()
        topic = f"{MQTTConf.PUBLISH_BASE_TOPIC}/{user_id}/tasks"
        message_info = client.publish(topic, json.dumps(notification), retain=False, qos=1)
        message_info.wait_for_publish()
        logger.info(f"Notification message published to {topic}")
        logger.debug(f"Notification: {notification}")
        client.loop_stop()
        client.disconnect()
        return True
except Exception as e:
logger.exception(f"Exception at MQTT Publish: {e}")
return False
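# Usage sketch (illustrative, not part of the original module); the payload
# shape and user id are hypothetical:
#
#     push_notification({"title": "Job finished", "job_id": "job_42"}, user_id="user_100")
#     # publishes to f"{MQTTConf.PUBLISH_BASE_TOPIC}/user_100/tasks", returns True/False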
import base64
from Cryptodome import Random
from Cryptodome.Cipher import AES
class AESCipher:
"""
A classical AES Cipher. Can use any size of data and any size of password thanks to padding.
Also ensure the coherence and the type of the data with a unicode to byte converter.
"""
def __init__(self, key):
self.bs = 16
self.key = AESCipher.str_to_bytes(key)
@staticmethod
def str_to_bytes(data):
u_type = type(b"".decode("utf8"))
if isinstance(data, u_type):
return data.encode("utf8")
return data
def _pad(self, s):
return s + (self.bs - len(s) % self.bs) * AESCipher.str_to_bytes(chr(self.bs - len(s) % self.bs))
@staticmethod
def _unpad(s):
return s[: -ord(s[len(s) - 1 :])]
def encrypt(self, raw):
raw = self._pad(AESCipher.str_to_bytes(raw))
iv = Random.new().read(AES.block_size)
cipher = AES.new(self.key, AES.MODE_GCM, iv)
return base64.b64encode(iv + cipher.encrypt(raw)).decode("utf-8")
def decrypt(self, enc):
enc = base64.b64decode(enc)
iv = enc[: AES.block_size]
cipher = AES.new(self.key, AES.MODE_GCM, iv)
data = self._unpad(cipher.decrypt(enc[AES.block_size :]))
return data.decode("utf-8")
import uuid
from datetime import datetime, timedelta
from ut_security_util.security_tools.redis_conections import login_db
from scripts.constants import Secrets
from scripts.logging import logger
from scripts.utils.security_utils.jwt_util import JWT
jwt = JWT()
def create_token(user_id, ip, token, age=Secrets.LOCK_OUT_TIME_MINS, login_token=None, space_id=None):
"""
This method is to create a cookie
"""
try:
uid = login_token
if not uid:
uid = str(uuid.uuid4()).replace("-", "")
payload = {"ip": ip, "user_id": user_id, "token": token, "uid": uid, "age": age}
if space_id:
payload["space_id"] = space_id
exp = datetime.utcnow() + timedelta(minutes=age)
_extras = {"iss": Secrets.issuer, "exp": exp}
_payload = {**payload, **_extras}
new_token = jwt.encode(_payload)
# Add session to redis
login_db.set(uid, new_token)
login_db.expire(uid, timedelta(minutes=age))
return uid
except Exception as e:
logger.error(e)
raise e
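# Usage sketch (illustrative, not part of the original module): the returned
# uid is the opaque cookie value; the signed JWT itself lives only in redis
# under that uid, expiring after `age` minutes. Argument values below are
# hypothetical:
#
#     login_token = create_token(
#         user_id="user_100",
#         ip="10.0.0.1",
#         token=Secrets.token,
#         space_id="space_001",
#     )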
from secrets import compare_digest
from typing import Optional
from fastapi import HTTPException, Request, Response, status
from fastapi.openapi.models import APIKey, APIKeyIn
from fastapi.security import APIKeyCookie
from fastapi.security.api_key import APIKeyBase
from pydantic import BaseModel, Field
from ut_security_util.security_tools.redis_conections import login_db
from scripts.config import Service
from scripts.constants import Secrets
from scripts.logging import logger
from scripts.utils.security_utils.apply_encryption_util import create_token
from scripts.utils.security_utils.jwt_util import JWT
class CookieAuthentication(APIKeyBase):
"""
Authentication backend using a cookie.
Internally, uses a JWT token to store the data.
"""
scheme: APIKeyCookie
cookie_name: str
cookie_secure: bool
def __init__(
self,
cookie_name: str = "login-token",
):
super().__init__()
self.model: APIKey = APIKey(**{"in": APIKeyIn.cookie}, name=cookie_name)
self.scheme_name = self.__class__.__name__
self.cookie_name = cookie_name
self.scheme = APIKeyCookie(name=self.cookie_name, auto_error=False)
self.login_redis = login_db
self.jwt = JWT()
async def __call__(self, request: Request, response: Response) -> str:
cookies = request.cookies
login_token = cookies.get("login-token")
if not login_token:
login_token = request.headers.get("login-token")
if not login_token:
raise HTTPException(status_code=401)
jwt_token = self.login_redis.get(login_token)
if not jwt_token:
raise HTTPException(status_code=401)
try:
decoded_token = self.jwt.validate(token=jwt_token)
if not decoded_token:
raise HTTPException(status_code=401)
except Exception as e:
raise HTTPException(status_code=401, detail=e.args)
user_id = decoded_token.get("user_id", decoded_token.get("userId"))
space_id = decoded_token.get("space_id", decoded_token.get("spaceId"))
_token = decoded_token.get("token")
_age = int(decoded_token.get("age", Secrets.LOCK_OUT_TIME_MINS))
if any(
[
not compare_digest(Secrets.token, _token),
login_token != decoded_token.get("uid"),
]
):
raise HTTPException(status_code=401)
request.cookies.update({"user_id": user_id, "space_id": space_id, "spaceId": space_id, "userId": user_id})
try:
new_token = create_token(
user_id=user_id,
ip=request.client.host,
token=Secrets.token,
age=_age,
login_token=login_token,
space_id=space_id,
)
except Exception as e:
raise HTTPException(status_code=401, detail=e.args)
response.set_cookie(
"login-token",
new_token,
samesite="strict",
httponly=True,
secure=Service.secure_cookie,
max_age=Secrets.LOCK_OUT_TIME_MINS * 60,
)
# If project ID is null, this is susceptible to 500 Status Code. Ensure token formation has project ID in
# login token
if not space_id:
logger.info("Project ID not found in Old token. Soon to be deprecated. Proceeding for now")
response.headers.update({"login-token": new_token, "userId": user_id, "user_id": user_id})
return user_id
response.headers.update(
{
"login-token": new_token,
"spaceId": space_id,
"space_id": space_id,
"userId": user_id,
"user_id": user_id,
}
)
return user_id
class MetaInfoSchema(BaseModel):
spaceId: Optional[str] = ""
space_id: Optional[str] = ""
user_id: Optional[str] = ""
language: Optional[str] = ""
ip_address: Optional[str] = ""
login_token: Optional[str] = Field(alias="login-token")
class Config:
allow_population_by_field_name = True
class MetaInfoCookie(APIKeyBase):
"""
    Meta info backend using a cookie.
"""
scheme: APIKeyCookie
def __init__(self):
super().__init__()
self.model: APIKey = APIKey(**{"in": APIKeyIn.cookie}, name="meta")
self.scheme_name = self.__class__.__name__
def __call__(self, request: Request, response: Response):
cookies = request.cookies
cookie_json = {
"spaceId": cookies.get("spaceId", request.headers.get("spaceId")),
"userId": cookies.get("user_id", cookies.get("userId", request.headers.get("userId"))),
"language": cookies.get("language", request.headers.get("language")),
}
return MetaInfoSchema(
space_id=cookie_json["spaceId"],
user_id=cookie_json["userId"],
spaceId=cookie_json["spaceId"],
language=cookie_json["language"],
ip_address=request.client.host,
login_token=cookies.get("login-token"),
)
class GetUserID(APIKeyBase):
"""
    User ID backend using a cookie.
"""
scheme: APIKeyCookie
def __init__(self):
super().__init__()
self.model: APIKey = APIKey(**{"in": APIKeyIn.cookie}, name="user_id")
self.scheme_name = self.__class__.__name__
def __call__(self, request: Request, response: Response):
if user_id := request.cookies.get("user_id", request.cookies.get("userId", request.headers.get("userId"))):
return user_id
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)
import jwt
from jwt.exceptions import (
ExpiredSignatureError,
InvalidSignatureError,
MissingRequiredClaimError,
)
from scripts.config import KeyPath
from scripts.constants import Secrets
from scripts.errors import AuthenticationError, ErrorMessages
from scripts.logging import logger
class JWT:
def __init__(self):
self.max_login_age = Secrets.LOCK_OUT_TIME_MINS
self.issuer = Secrets.issuer
self.alg = Secrets.alg
self.public = KeyPath.PUBLIC
self.private = KeyPath.PRIVATE
def encode(self, payload):
try:
with open(self.private) as f:
key = f.read()
return jwt.encode(payload, key, algorithm=self.alg)
except Exception as e:
logger.exception(f"Exception while encoding JWT: {str(e)}")
raise
def decode(self, token):
try:
with open(self.public) as f:
key = f.read()
            return jwt.decode(token, key, algorithms=[self.alg])
        except Exception as e:
            logger.exception(f"Exception while decoding JWT: {str(e)}")
            raise
def validate(self, token):
try:
with open(self.public) as f:
key = f.read()
payload = jwt.decode(
token,
key,
                algorithms=[self.alg],
                leeway=Secrets.leeway_in_mins,  # note: PyJWT interprets leeway in seconds
options={"require": ["exp", "iss"]},
)
return payload
except InvalidSignatureError:
raise AuthenticationError(ErrorMessages.ERROR003)
except ExpiredSignatureError:
raise AuthenticationError(ErrorMessages.ERROR002)
except MissingRequiredClaimError:
raise AuthenticationError(ErrorMessages.ERROR002)
except Exception as e:
logger.exception(f"Exception while validating JWT: {str(e)}")
raise
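# A hedged round-trip sketch (assumes Secrets.alg names an asymmetric algorithm
# such as RS256 and that KeyPath.PRIVATE / KeyPath.PUBLIC are a matching key pair):
def _jwt_round_trip_example():
    from datetime import datetime, timedelta, timezone

    handler = JWT()
    expiry = datetime.now(timezone.utc) + timedelta(minutes=5)
    token = handler.encode({"sub": "user_1", "iss": handler.issuer, "exp": expiry})
    claims = handler.validate(token)  # raises AuthenticationError on bad or expired tokens
    assert claims["sub"] == "user_1"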
from typing import Optional
from fastapi import Request, Response
from fastapi.openapi.models import APIKey, APIKeyIn
from fastapi.security.api_key import APIKeyBase, APIKeyCookie
from pydantic import BaseModel
class MetaInfoSchema(BaseModel):
space_id: Optional[str] = ""
user_id: Optional[str] = ""
language: Optional[str] = ""
class MetaInfoCookie(APIKeyBase):
"""
Project ID backend using a cookie.
"""
scheme: APIKeyCookie
cookie_name: str
def __init__(self, cookie_name: str = "spaceId"):
super().__init__()
self.model: APIKey = APIKey(**{"in": APIKeyIn.cookie}, name=cookie_name)
self.cookie_name = cookie_name
self.scheme_name = self.__class__.__name__
self.scheme = APIKeyCookie(name=self.cookie_name, auto_error=False)
async def __call__(self, request: Request, response: Response):
cookies = request.cookies
cookie_json = {
"spaceId": cookies.get("spaceId", request.headers.get("spaceId")),
"userId": cookies.get("user_id", cookies.get("userId", request.headers.get("userId"))),
"language": cookies.get("language", request.headers.get("language")),
}
return MetaInfoSchema(
space_id=cookie_json["spaceId"], user_id=cookie_json["userId"], language=cookie_json["language"]
)
@staticmethod
def set_response_info(cookie_name, cookie_value, response: Response):
response.set_cookie(cookie_name, cookie_value, samesite="strict", httponly=True)
response.headers[cookie_name] = cookie_value
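# A short hedged usage sketch (the token value is a placeholder): a login handler
# could persist a refreshed token both as an httponly cookie and a response header.
def _set_response_info_example(response: Response):
    MetaInfoCookie.set_response_info("login-token", "<refreshed-token>", response)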
import logging
from datetime import datetime, timedelta, timezone
from functools import lru_cache, wraps
import orjson as json
from fastapi import HTTPException, Request, status
from ut_mongo_util import mongo_client
from ut_security_util.security_tools.redis_conections import user_role_permissions_redis
from scripts.db.mongo.ilens_configuration.collections.user import User
from scripts.db.mongo.ilens_configuration.collections.user_space import UserSpace
from scripts.errors import ErrorMessages, ILensError
def timed_lru_cache(seconds: int = 10, maxsize: int = 128):
def wrapper_cache(func):
func = lru_cache(maxsize=maxsize)(func)
func.lifetime = timedelta(seconds=seconds)
func.expiration = datetime.now(timezone.utc) + func.lifetime
@wraps(func)
def wrapped_func(*args, **kwargs):
if datetime.now(timezone.utc) >= func.expiration:
logging.debug("Cache Expired")
func.cache_clear()
func.expiration = datetime.now(timezone.utc) + func.lifetime
return func(*args, **kwargs)
return wrapped_func
return wrapper_cache
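# A hedged demo of the decorator (the function below is illustrative): it behaves
# like functools.lru_cache, except the whole cache is flushed once the TTL lapses.
@timed_lru_cache(seconds=5, maxsize=32)
def _expensive_lookup(key: str) -> str:
    logging.debug(f"cache miss for {key}")
    return key.upper()
# Repeat calls to _expensive_lookup("a") within 5 seconds are served from the
# cache; after that the cache is cleared and the next call recomputes.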
@timed_lru_cache(seconds=60, maxsize=1000)
def get_user_role_id(user_id, space_id):
logging.debug("Fetching user role from DB")
user_conn = User(mongo_client=mongo_client) # user collection from ilens_configuration DB
if user_role := user_conn.find_user_role_for_user_id(user_id=user_id, project_id=space_id):
return user_role["userrole"][0]
# if user not found in primary collection, check if user is in project collection
user_proj_conn = UserSpace() # user_project collection from ilens_configuration DB
if user_role := user_proj_conn.find_user_role_for_user_id(user_id=user_id, space_id=space_id):
return user_role["userrole"][0]
class RBAC:
def __init__(self, entity_name: str, operation: list[str]):
self.entity_name = entity_name
self.operation = operation
def check_permissions(self, user_id: str, space_id: str) -> dict[str, bool]:
user_role_id = get_user_role_id(user_id, space_id)
if not user_role_id:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User role not found!")
r_key = f"{space_id}__{user_role_id}" # eg: project_100__user_role_100
user_role_rec = user_role_permissions_redis.hget(r_key, self.entity_name)
if not user_role_rec:
raise ILensError(ErrorMessages.ERROR004)
user_role_rec = json.loads(user_role_rec)
if permission_dict := {i: True for i in self.operation if user_role_rec.get(i)}:
return permission_dict
else:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Insufficient Permission!")
def __call__(self, request: Request) -> dict[str, bool]:
user_id = request.cookies.get("userId", request.headers.get("userId"))
space_id = request.cookies.get("projectId", request.headers.get("projectId"))
return self.check_permissions(user_id=user_id, space_id=space_id)
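# A hedged wiring sketch (entity name, operation, and route are assumptions):
# RBAC instances are callables, so FastAPI injects the permission dict per request.
def _rbac_example_app():
    from fastapi import Depends, FastAPI

    app = FastAPI()

    @app.get("/catalogs")
    def list_catalogs(permissions: dict = Depends(RBAC(entity_name="catalog", operation=["view"]))):
        # Reaching here means "view" was granted; otherwise a 403 (or ERROR004) was raised.
        return {"permissions": permissions}

    return app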
aiohttp==3.6.2
aiohttp-cors==0.7.0
apscheduler~=3.7.0
async-timeout==3.0.1
attrs==20.2.0
certifi==2020.6.20
chardet==3.0.4
click==7.1.2
colorclass==2.2.0
colorlog==4.2.1
croniter==0.3.34
faust==1.10.4
grequests~=0.6.0
httpx==0.18.2
idna==2.10
kafka-python==1.4.7
mode==4.3.2
mypy-extensions==0.4.3
natsort==7.0.1
opentracing==1.3.0
paho-mqtt==1.5.0
pydantic[dotenv]==1.8.2
PyJWT[crypto]==2.0.1
pymongo==3.11.0
python-dateutil==2.8.1
python-dotenv~=0.18.0
pytz~=2021.1
pyXSteam==0.4.8
redis==3.5.3
requests==2.24.0
robinhood-aiokafka==1.1.6
rq==1.5.2
six==1.15.0
terminaltables==3.1.0
typing-extensions==3.7.4.3
urllib3==1.25.10
venusian==1.2.0
def main():
    try:
        value = None
        # write your code here
        return value
    except Exception as e:
        print(f"Failed: {e}")
if __name__ == "__main__":
    main()
deployment:
environmentVar:
- name: APP_NAME
value: "catalog-management"
- name: MODULE_NAME
value: "Catalog-Management"
- name: PORT
value: "45561"
- name: LOG_LEVEL
value: "INFO"
- name: BASE_DIRECTORY
value: "/code/data"
- name: MOUNT_DIR
value: "/catalog-management"
- name: GLOBAL_CATALOG_SERVICES
value: {{ GLOBAL_CATALOG_SERVICES }}
- name: GLOBAL_CATALOG_PROJECT_ID
value: {{ GLOBAL_CATALOG_PROJECT_ID }}
- name: GLOBAL_CATALOG_USER
value: {{ GLOBAL_CATALOG_USER }}
- name: REQUEST_TIMEOUT
value: "30"
- name: FTDM_CATALOG_PROXY
value: "/catalog-management"
- name: GC_BEARER_TOKEN
value: "Z2xvYmFsLWNhdGFsb2ctbG9naW4tdG9rZW4="