Commit 4233e9a5 authored by madhuri.penikalapati

first commit

# Ignore the logs directory
logs/
# Ignore the password file
passwords.txt
# Ignore git and cache folders
.git
.cache
.gitignore
.gitlab-ci.yml
variables.yml
# Ignore all the markdown and class files
*.md
**/*.class
.env
__pycache__
*.pyc
*.pyo
*.pyd
.Python
pip-log.txt
pip-delete-this-directory.txt
.tox
.coverage
.coverage.*
nosetests.xml
coverage.xml
*.cover
*.log
MONGO_URI=mongodb://192.168.0.220:2717/
APP_NAME=ilens_scheduler
SCHEDULER_THREAD=120
SCHEDULER_PROCESS=20
MAX_INSTANCE=200
MISFIRE_SEC=180
BASE_PATH=/data
MOUNT_DIR=/ilens_scheduler
REDIS_URI=redis://192.168.0.220:6379
SECURE_ACCESS=True
SW_DOCS_URL=/docs
SW_OPENAPI_URL=/openapi.json
ENABLE_CORS=True
CORS_URLS=staging.ilens.io
SECURE_COOKIE=True
VERIFY_SIGNATURE=True
PROTECTED_HOSTS="*.unifytwin.com,*.ilens.io"
PORT=1234
#!/usr/bin/env bash
pip install ruff black isort --upgrade
ruff scripts
black scripts --check
isort scripts --check-only
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
hooks:
- id: end-of-file-fixer
- id: trailing-whitespace
- id: requirements-txt-fixer
- repo: https://github.com/omnilib/ufmt
rev: v2.0.0
hooks:
- id: ufmt
additional_dependencies:
- black == 22.6.0
- usort == 1.0.4
- repo: https://github.com/PyCQA/flake8
rev: 5.0.4
hooks:
- id: flake8
args:
- "--max-line-length=120"
- "--max-complexity=20"
- "--select=B,C,E,F,W,T4,B9"
# these are errors that will be ignored by flake8
# check out their meaning here
# https://flake8.pycqa.org/en/latest/user/error-codes.html
- "--ignore=E203,E266,E501,W503,F403,F401,E402"
FROM tiangolo/uvicorn-gunicorn-fastapi:python3.9-slim
WORKDIR /app
# Install Python dependencies first so this layer is cached across code changes
COPY requirements.txt /app/requirements.txt
RUN pip install -r requirements.txt
# apt-get is the script-friendly interface; clean the package lists to keep the image small
RUN apt-get update && apt-get install -y --no-install-recommends curl && rm -rf /var/lib/apt/lists/*
COPY . /app
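# Build-and-run sketch (tag and port mapping are illustrative; the base image
# serves the app on port 80 by default):
#   docker build -t ilens-scheduler .
#   docker run --env-file .env -p 1234:80 ilens-scheduler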
# iLens - DevOps
### GitFlow
Below is the list of branches for which CI/CD is configured.
### Branches
| Branch | Description | URL |
| --------- | ------------------------------------------------------------------------- | -------------------------------------------------------------- |
| `master` | Tag and Release the new Version. |-|
| `QA` | Deploy to Non-production, testing environment - AKS. | https://qa.ilens.io/ |
| `develop` | Deploy to the 220-Server and Dev-Kubernetes Cluster. | http://192.168.0.220/dev_master/ and http://192.168.0.236/dev_master/ |
| `feature/<feature_name>` | This holds the code base for feature update. |-|
| `patch/<patch_name>` | This holds the code base for patch update. |-|
- There are two environments for the Development Team: a self-hosted Kubernetes Cluster - http://192.168.0.236/dev_master/ - and a second environment where the source code is deployed - http://192.168.0.220/dev_master/.
- The QA Environment is an AKS Cluster - https://qa.ilens.io/
- Production environments are all client environments.
### Merge Requests
1. When a Merge Request targeting the `develop` or `QA` branch is merged, pipelines are triggered to deploy to the respective environment.
1. When a feature update has to be made, a new branch named **`feature/`<feature_name>** has to be created from the `master` branch. Once development is completed, the code should be merged back to the `master` branch, after which auto-tagging happens.
1. When a patch update has to be made, a new branch named **`patch/`<patch_name>** has to be created from the `master` branch. Once development is completed, the code should be merged back to the `master` branch, after which auto-tagging happens.
The same is depicted in the below diagram:
<img src="https://gitlab-pm.knowledgelens.com/KnowledgeLens/Products/iLens-2.0/core/devops/scripts/-/raw/auto-tagging-ci/Patch-Feature-Flow.png" alt="Merge request"/>
### Environment
* [ ] MONGO_URI=mongodb://192.168.0.220:2717
* [ ] APP_NAME=ilens_scheduler
* [ ] SCHEDULER_THREAD=120
* [ ] SCHEDULER_PROCESS=20
* [ ] MAX_INSTANCE=200
* [ ] MISFIRE_SEC=180
* [ ] BASE_PATH=/data
* [ ] MOUNT_DIR=/ilens_scheduler
* [ ] REDIS_URI=redis://192.168.0.220:6379
* [ ] PORT=1234
Version: v6.8
Release Note:
Enhancements:
- Fixed code smells.
- Added Redis URI.
if __name__ == '__main__':
from dotenv import load_dotenv
load_dotenv()
import uvicorn
from main import app
from scripts.constants.app_configuration import Service
from scripts.core.engine.scheduler_engine import scheduler
from scripts.logging.logging import logger
service_obj = Service()
scheduler.start()
if __name__ == "__main__":
try:
        app.root_path = "ilens-scheduler"
        logger.info("Starting the scheduler framework")
        logger.info("Scheduler framework started successfully")
        # Also pass root_path to uvicorn: with the "main:app" import string, uvicorn
        # re-imports the module in a fresh process, so the attribute set above alone
        # is not picked up.
        uvicorn.run("main:app", host=service_obj.host, port=int(service_obj.port), root_path="ilens-scheduler")
except (KeyboardInterrupt, SystemExit):
scheduler.shutdown()
raise
-----BEGIN RSA PRIVATE KEY-----
MIICWwIBAAKBgQClilTaeHq6Zc+kWHCNl1O0btGRm7ct3O5zqWx1mwwLUWH14eft
Hi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULfENhwd/D7P3mnoRlktPT2t+tt
RRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw2hcqOYe/NGTkmm1PswIDAQAB
AoGAZPARR1l5NBkKYGKQ1rU0E+wSmx+AtVVmjF39RUSyNmB8Q+poebwSgsr58IKt
T6Yq6Tjyl0UAZTGmferCK0xJJrqyP0hMn4nNNut+acWMKyt+9YrA2FO+r5Jb9JuT
SK35xXnM4aZLGppgWJxRzctpIz+qkf6oLRSZme0AuiqcwYECQQDY+QDL3wbWplRW
bze0DsZRMkDAkNY5OCydvjte4SR/mmAzsrpNrS5NztWbaaQrefoPbsdYBPbd8rS7
C/s/0L1zAkEAw1EC5zt2STuhkcKLa/tL+bk8WHHHtf19aC9kBj1TvWBFh+JojWCo
86iK5fLcHzhyQx5Qi3E9LG2HvOWhS1iUwQJAKbEHHyWW2c4SLJ2oVXf1UYrXeGkc
UNhjclgobl3StpZCYAy60cwyNo9E6l0NR7FjhG2j7lzd1t4ZLkvqFmQU0wJATLPe
yQIwBLh3Te+xoxlQD+Tvzuf3/v9qpWSfClhBL4jEJYYDeynvj6iry3whd91J+hPI
m8o/tNfay5L+UcGawQJAAtbqQc7qidFq+KQYLnv5gPRYlX/vNM+sWstUAqvWdMze
JYUoTHKgiXnSZ4mizI6/ovsBOMJTb6o1OJCKQtYylw==
-----END RSA PRIVATE KEY-----
-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQClilTaeHq6Zc+kWHCNl1O0btGR
m7ct3O5zqWx1mwwLUWH14eftHi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULf
ENhwd/D7P3mnoRlktPT2t+ttRRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw
2hcqOYe/NGTkmm1PswIDAQAB
-----END PUBLIC KEY-----
[SERVICE]
port = $PORT
host = 0.0.0.0
enable_security = false
allow_cross_origin = true
secure_cookie = $SECURE_COOKIE

[MONGO_DB]
uri = $MONGO_URI

[SCHEDULER]
SCHEDULER_THREAD = $SCHEDULER_THREAD
SCHEDULER_PROCESS = $SCHEDULER_PROCESS
MAX_INSTANCE = $MAX_INSTANCE
MISFIRE_SEC = $MISFIRE_SEC

[PATH]
base_path = $BASE_PATH
mount_dir = $MOUNT_DIR

[REDIS]
REDIS_URI = $REDIS_URI
login_db = 9
user_role_permissions = 21
project_tags_db = 18
if __name__ == '__main__':
from dotenv import load_dotenv
load_dotenv()
import uvicorn
from main import app
from scripts.constants.app_configuration import Service
from scripts.core.engine.scheduler_engine import scheduler
from scripts.logging.logging import logger
service_obj = Service()
scheduler.start()
if __name__ == "__main__":
try:
logger.info("Starting the scheduler framework")
logger.info("Scheduler framework started successfully")
uvicorn.run(app, host=service_obj.host, port=int(service_obj.port))
except (KeyboardInterrupt, SystemExit):
scheduler.shutdown()
raise
MONGO_URI=mongodb://ilens:ilens4321@192.168.0.220:2717/?authSource=admin
APP_NAME=ilens_scheduler
SCHEDULER_THREAD=120
SCHEDULER_PROCESS=20
MAX_INSTANCE=200
MISFIRE_SEC=180
BASE_PATH=/data
MOUNT_DIR=/ilens_scheduler
REDIS_URI=redis://192.168.0.220:6379
SECURE_ACCESS=False
SW_DOCS_URL=/docs
SW_OPENAPI_URL=/openapi.json
ENABLE_CORS=True
CORS_URLS=staging.ilens.io
SECURE_COOKIE=False
VERIFY_SIGNATURE=False
PROTECTED_HOSTS="*.unifytwin.com,*.ilens.io"
PORT=1234
if __name__ == '__main__':
from dotenv import load_dotenv
load_dotenv()
import os
import uvicorn
from fastapi import FastAPI, Depends
from fastapi.middleware.cors import CORSMiddleware
from jwt_signature_validator.encoded_payload import (EncodedPayloadSignatureMiddleware)
from scripts.constants.app_configuration import Service
from scripts.constants.app_constants import Secrets
from scripts.core.engine.scheduler_engine import scheduler
from scripts.logging.logging import logger
from scripts.services.scheduler import scheduler_router, health_check
from scripts.utils.security_utils.decorators import CookieAuthentication
secure_access = os.environ.get("SECURE_ACCESS", default=False)
protected_hosts = os.environ.get("PROTECTED_HOSTS", "").split(",")
# Read the documented VERIFY_SIGNATURE variable; env values are strings, so
# coerce to a boolean instead of relying on truthiness of "False".
verify_signature = os.environ.get("VERIFY_SIGNATURE", default="False") in ("true", "True")
auth = CookieAuthentication()
app = FastAPI(
title="Scheduler Microservice",
version="1.0",
description="Scheduler App",
openapi_url=os.environ.get("SW_OPENAPI_URL", default="/openapi.json"),
docs_url=os.environ.get("SW_DOCS_URL"),
redoc_url=None
)
if os.environ.get("ENABLE_CORS") in (True, 'true', 'True') and os.environ.get("CORS_URLS"):
origins_list = os.environ.get("CORS_URLS", default="")
origins_list = origins_list.split(',') if origins_list else ["*"]
if verify_signature:
app.add_middleware(
EncodedPayloadSignatureMiddleware,
jwt_secret=Secrets.signature_key,
jwt_algorithms=Secrets.signature_key_alg,
protect_hosts=protected_hosts,
)
if secure_access in [True, 'true', 'True']:
app.include_router(scheduler_router, dependencies=[Depends(auth)])
else:
app.include_router(scheduler_router)
app.include_router(health_check)
service_obj = Service()
if __name__ == "__main__":
try:
logger.info("Starting the scheduler framework")
scheduler.start()
logger.info("Scheduler framework started successfully")
uvicorn.run(app, host=service_obj.host, port=service_obj.port)
except (KeyboardInterrupt, SystemExit):
scheduler.shutdown()
raise
[tool.black]
line-length = 120
[tool.isort]
profile = "black"
[tool.ruff]
select = [
"E", # pycodestyle errors
"W", # pycodestyle warnings
"F", # pyflakes
# "I", # isort
"C", # flake8-comprehensions
"B", # flake8-bugbear
]
ignore = [
"E501", # line too long, handled by black
"B008", # do not perform function calls in argument defaults
"C901", # too complex
"E402",
"B904",
"B905",
"B009"
]
[tool.ruff.per-file-ignores]
"__init__.py" = ["F401"]
APScheduler==3.7.0
fastapi==0.65.2
jwt-signature-validator~=0.0.1
pre-commit~=2.20.0
pyaml==20.4.0
pydantic==1.8.2
PyJWT==2.4.0
pymongo==3.11.3
python-dotenv~=0.17.1
pytz==2021.1
redis~=3.5.3
requests==2.25.1
uvicorn[standard]~=0.18.2
cryptography==38.0.3
orjson==3.8.1
setuptools==65.5.1
\ No newline at end of file
if __name__ == '__main__':
from dotenv import load_dotenv
load_dotenv()
import os
import sys
from configparser import BasicInterpolation, ConfigParser
class EnvInterpolation(BasicInterpolation):
"""
Interpolation which expands environment variables in values.
"""
    def before_get(self, parser, section, option, value, defaults):
        value = super().before_get(parser, section, option, value, defaults)
        expanded = os.path.expandvars(value)
        # os.path.expandvars leaves "$NAME" untouched when NAME is unset,
        # so treat a still-unexpanded value as missing.
        return None if expanded.startswith("$") else expanded
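# Example of the interpolation behaviour (illustrative values):
#     >>> os.environ["DEMO_URI"] = "mongodb://localhost:27017"
#     >>> parser = ConfigParser(interpolation=EnvInterpolation())
#     >>> parser.read_string("[MONGO_DB]\nuri = $DEMO_URI")
#     >>> parser.get("MONGO_DB", "uri")
#     'mongodb://localhost:27017'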
try:
config = ConfigParser(interpolation=EnvInterpolation())
config.read(f"conf/application.conf")
except Exception as e:
print(f"Error while loading the config: {e}")
print("Failed to Load Configuration. Exiting!!!")
sys.stdout.flush()
sys.exit()
class PathToStorage:
BASE_PATH = config.get("PATH", "base_path")
if not BASE_PATH:
print("Error, environment variable BASE_PATH not set")
sys.exit(1)
MOUNT_DIR = config.get("PATH", "mount_dir")
if not MOUNT_DIR:
print("Error, environment variable MOUNT_DIR not set")
sys.exit(1)
LOGS_MODULE_PATH = f"{BASE_PATH}/logs{MOUNT_DIR}/"
class Service:
host = config.get("SERVICE", "host")
port = config.getint("SERVICE", "port")
enable_security = config.getboolean("SERVICE", "enable_security")
allow_cross_origin = config.getboolean("SERVICE", "allow_cross_origin")
secure_cookie = config.getboolean("SERVICE", "secure_cookie", fallback=True)
class Mongo:
uri = config.get("MONGO_DB", "uri")
class SchedulerConf:
scheduler_thread = config.get("SCHEDULER", "SCHEDULER_THREAD")
scheduler_thread = 120 if not scheduler_thread else int(scheduler_thread)
scheduler_process = config.get("SCHEDULER", "SCHEDULER_PROCESS")
scheduler_process = 5 if not scheduler_process else int(scheduler_process)
max_instance = config.get("SCHEDULER", "MAX_INSTANCE")
max_instance = 50 if not max_instance else int(max_instance)
misfire_grace_time_in_s = config.getint("SCHEDULER", "MISFIRE_SEC", fallback=3)
class KeyPath(object):
public = os.path.join("assets", "keys", "public")
private = os.path.join("assets", "keys", "private")
# Redis Details
redis_section = "REDIS"
redis_uri = config[redis_section]["REDIS_URI"]
redis_login_db = int(config[redis_section]["login_db"])
user_role_permissions = config[redis_section]["user_role_permissions"]
project_tags_db = config[redis_section]["project_tags_db"]
class APIEndpoints:
# scheduler APIs
scheduler_base = "/scheduler"
schedule = "/schedule"
delete_schedule = "/delete"
fetch_schedule_id = "/fetch/schedule_id"
fetch_schedules = "/fetch/all_schedules"
fetch_schedule_details = "/fetch/schedule_details"
fetch_schedule_details_table = "/fetch/schedule_details_table"
class DatabaseConstants:
db_ilens_configuration = "ilens_configuration"
collection_scheduled_metadata = "schedule_metadata"
collection_scheduled_jobs = "scheduled_jobs"
collection_user = "user"
collection_user_project = "user_project"
collection_scheduled_job_runs = "scheduled_job_runs"
class CommonKeys:
schedule_id = "schedule_id"
KEY_WEEKLY = "weekly"
KEY_MONTHLY = "monthly"
KEY_YEARLY = "yearly"
KEY_ONCE = "once"
KEY_MONTH = "month"
KEY_HOUR = "hour"
KEY_DAY = "day"
KEY_MINUTE = "minute"
KEY_DAILY = "daily"
KEY_YEAR = "year"
KEY_DAY_OF_WEEK = "day_of_week"
KEY_DAY_OF_MONTH = "day_of_month"
KEY_MONTH_OF_YEAR = "month_of_year"
KEY_CRON_DICT = "cron_dict"
KEY_WEEK = "week"
KEY_RECURSION_COUNT = "recursion_count"
KEY_TIMEZONE = "timezone"
class Secrets:
LOCK_OUT_TIME_MINS = 30
leeway_in_mins = 10
unique_key = "45c37939-0f75"
token = "8674cd1d-2578-4a62-8ab7-d3ee5f9a"
issuer = "ilens"
alg = "RS256"
signature_key = "kliLensKLiLensKL"
signature_key_alg = ["HS256"]
class APIJobConstants:
response_codes = {
"200": dict(status=True, message="Job response for #schedule_id#: #msg#"),
"210": dict(status=True, message="No response expected from a UDP protocol"),
"401": dict(
status=False,
message="Job faced an authorization issue when accessing the API '#url#': #err#",
),
"403": dict(
status=False,
message="Job faced an authentication issue when accessing the API '#url#': #err#",
),
"404": dict(
status=False,
message="Job was unable to find the URL '#url#': #err#",
),
}
validation_failed = "Request data model validation failed!"
scheduling_failure = "Faced a problem when scheduling the job"
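    # The "#...#" tokens in the messages above are placeholders; the handler is
    # assumed to substitute them, e.g. (hypothetical):
    #     msg = response_codes["404"]["message"].replace("#url#", url).replace("#err#", str(err))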
from scripts.utils.mongo_util import MongoConnect
from scripts.constants import app_configuration
mongo_util = MongoConnect(uri=app_configuration.Mongo.uri)
from apscheduler.executors.pool import ProcessPoolExecutor, ThreadPoolExecutor
from apscheduler.jobstores.mongodb import MongoDBJobStore
from apscheduler.schedulers.background import BackgroundScheduler
from pytz import utc
from scripts.constants.app_configuration import Mongo, SchedulerConf
from scripts.constants.app_constants import DatabaseConstants
from scripts.logging.logging import logger
database_name = DatabaseConstants.db_ilens_configuration
collection = DatabaseConstants.collection_scheduled_jobs
job_store = {
"default": MongoDBJobStore(
database=database_name,
collection=collection,
host=Mongo.uri,
connect=False,
)
}
executors = {
"default": ThreadPoolExecutor(SchedulerConf.scheduler_thread),
"processpool": ProcessPoolExecutor(SchedulerConf.scheduler_process),
}
job_defaults = {"coalesce": False, "misfire_grace_time": SchedulerConf.misfire_grace_time_in_s, "max_instances": SchedulerConf.max_instance}
logger.info("Scheduler Initialising")
scheduler = BackgroundScheduler(
jobstores=job_store,
executors=executors,
job_defaults=job_defaults,
timezone=utc,
daemon=True,
)
logger.info("Scheduler Initialised")
from scripts.constants.app_constants import DatabaseConstants, CommonKeys
from scripts.constants.db_connection import mongo_util
from scripts.logging.logging import logger
from scripts.utils.mongo_util import MongoCollectionClass
class ScheduleMetadataCollection(MongoCollectionClass):
def __init__(self):
self.database = DatabaseConstants.db_ilens_configuration
self.collection = DatabaseConstants.collection_scheduled_metadata
@property
def key_schedule_id(self):
return CommonKeys.schedule_id
def get_schedule_id(self, filters):
try:
data = mongo_util.find_one(
database_name=self.database,
collection_name=self.collection,
query=filters,
filter_dict={
self.key_schedule_id: 1,
self.key_mongo_default_id: 0,
},
)
return data
except Exception as e:
logger.exception(e)
raise
def get_all_schedules(self, filters):
try:
data = mongo_util.find(
database_name=self.database,
collection_name=self.collection,
query=filters,
)
return data
except Exception as e:
logger.exception(e)
raise
def get_schedule_details(self, schedule_id):
try:
query = {self.key_schedule_id: schedule_id}
data = mongo_util.find_one(
database_name=self.database,
collection_name=self.collection,
query=query,
)
return data
except Exception as e:
logger.exception(e)
raise
def delete_schedule(self, schedule_id):
try:
query = {self.key_schedule_id: schedule_id}
data = mongo_util.delete_one(
database_name=self.database,
collection_name=self.collection,
query=query,
)
return data
except Exception as e:
logger.exception(e)
raise
from scripts.constants.app_constants import DatabaseConstants
from scripts.constants.db_connection import mongo_util
from scripts.utils.mongo_util import MongoCollectionClass
class User(MongoCollectionClass):
def __init__(self):
self.database = DatabaseConstants.db_ilens_configuration
self.collection = DatabaseConstants.collection_user
    def find_user_role_for_user_id(self, user_id, project_id):
        query = {"user_id": user_id, "project_id": project_id}
        filter_dict = {"userrole": 1, "_id": 0}
        # find_one requires the database and collection names; the original call
        # omitted them and would raise a TypeError.
        return mongo_util.find_one(
            database_name=self.database,
            collection_name=self.collection,
            query=query,
            filter_dict=filter_dict,
        )
from scripts.constants.app_constants import DatabaseConstants, CommonKeys
from scripts.constants.db_connection import mongo_util
from scripts.logging.logging import logger
from scripts.utils.mongo_util import MongoCollectionClass
class UserProject(MongoCollectionClass):
def __init__(self):
self.database = DatabaseConstants.db_ilens_configuration
self.collection = DatabaseConstants.collection_user_project
    def find_user_role_for_user_id(self, user_id, project_id):
        query = {"user_id": user_id, "project_id": project_id}
        filter_dict = {"userrole": 1, "_id": 0}
        return mongo_util.find_one(
            database_name=self.database,
            collection_name=self.collection,
            query=query,
            filter_dict=filter_dict,
        )
import redis
from scripts.constants.app_configuration import redis_uri, redis_login_db, user_role_permissions, project_tags_db
login_db = redis.from_url(redis_uri, db=int(redis_login_db), decode_responses=True)
# The db values come from the config as strings; cast to int, as for login_db
user_role_permissions_redis = redis.from_url(
    redis_uri, db=int(user_role_permissions), decode_responses=True
)
project_details_db = redis.from_url(redis_uri, db=int(project_tags_db), decode_responses=True)
class ILensSchedulerError(Exception):
    """Base error class: generic iLens scheduler error"""

    def __init__(self, msg):
        Exception.__init__(self, msg)
class ErrorMessages:
JOB_RUN_META_DATA = "Scheduler faced a problem when storing the job run metadata"
SCHEDULE_META_DATA = "Scheduler faced a problem when storing the schedule metadata"
ERROR_ADD_JOB = "Scheduler faced a problem when adding a job"
NECESSARY_JOB_DETAILS = "Scheduler faced a problem when creating necessary job details"
class ILensErrors(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
class AuthenticationError(ILensErrors):
"""
JWT Authentication Error
"""
class ErrorMessages:
ERROR001 = "Authentication Failed. Please verify token"
ERROR002 = "Signature Expired"
ERROR003 = "Signature Not Valid"
    # All errors in the MONGO... series are exception codes for errors related to MongoDB
MONGO001 = "Error Code MONGO001: Server was unable to establish connection with MongoDB"
MONGO002 = "Error Code MONGO002: Server faced a problem when inserting document(s) into MongoDB"
MONGO003 = "Error Code MONGO003: Server faced a problem to find the document(s) with the given condition"
MONGO004 = "Error Code MONGO004: Server faced a problem to delete the document(s) with the given condition"
MONGO005 = "Error Code MONGO005: Server faced a problem to update the document(s) with the given condition and data"
MONGO006 = "Error Code MONGO006: Server faced a problem when aggregating the data"
MONGO007 = "Error Code MONGO007: Server faced a problem when closing MongoDB connection"
MONGO008 = "Error Code MONGO008: Found an existing record with the same ID in MongoDB"
MONGO009 = "Error Code MONGO009: Server faced a problem when fetching distinct documents from MongoDB"
MONGO010 = "Error Code MONGO010: Server faced a problem when performing a search and replace in MongoDB"
MONGO011 = "Error Code MONGO011: Server faced a problem when de-serializing MongoDB object"
class ILensException(Exception):
pass
class MongoException(ILensException):
pass
class MongoConnectionException(MongoException):
pass
class MongoQueryException(MongoException):
pass
class MongoEncryptionException(MongoException):
pass
class MongoRecordInsertionException(MongoQueryException):
pass
class MongoFindException(MongoQueryException):
pass
class MongoDeleteException(MongoQueryException):
pass
class MongoUpdateException(MongoQueryException):
pass
class MongoUnknownDatatypeException(MongoEncryptionException):
pass
class MongoDistinctQueryException(MongoException):
pass
class MongoFindAndReplaceException(MongoException):
pass
class MongoObjectDeserializationException(MongoException):
pass
logger:
name: ilens
level: DEBUG
handlers:
- type: RotatingFileHandler
max_bytes: 100000000
back_up_count: 5
- type: SocketHandler
host: localhost
port: 23582
- type: StreamHandler
name: ilens_visualizer
import logging
import os
from logging import StreamHandler
from logging.handlers import RotatingFileHandler, SocketHandler
import yaml
from scripts.constants.app_configuration import PathToStorage
# Reads the logging configuration from the given YAML file
def read_configuration(file_name):
    """
    :param file_name: path to the YAML configuration file
    :return: parsed configuration dictionary
    """
with open(file_name, 'r') as stream:
try:
return yaml.safe_load(stream)
except Exception as e:
print(f"Failed to load Configuration. Error: {e}")
config = read_configuration("scripts/logging/logger_conf.yml")
logging_config = config["logger"]
def get_logger():
"""
Creates a rotating log
"""
__logger__ = logging.getLogger('')
__logger__.setLevel(logging_config["level"].upper())
    log_formatter = "%(asctime)s - %(levelname)-6s - [%(threadName)5s:%(funcName)5s():%(lineno)s] - %(message)s"
time_format = "%Y-%m-%d %H:%M:%S"
file_path = PathToStorage.LOGS_MODULE_PATH
formatter = logging.Formatter(log_formatter, time_format)
for each_handler in logging_config["handlers"]:
if each_handler["type"] in ["RotatingFileHandler"]:
if not os.path.exists(file_path):
os.makedirs(file_path)
            log_file = os.path.join(file_path, f"{logging_config['name']}.log")
temp_handler = RotatingFileHandler(log_file,
maxBytes=each_handler["max_bytes"],
backupCount=each_handler["back_up_count"])
temp_handler.setFormatter(formatter)
elif each_handler["type"] in ["SocketHandler"]:
temp_handler = SocketHandler(each_handler["host"], each_handler["port"])
elif each_handler["type"] in ["StreamHandler"]:
temp_handler = StreamHandler()
temp_handler.setFormatter(formatter)
        else:
            # Skip unknown handler types instead of registering a None handler,
            # which would break every subsequent logging call.
            continue
        __logger__.addHandler(temp_handler)
return __logger__
logger = get_logger()
from typing import Optional, Any
from pydantic import BaseModel
class DefaultResponse(BaseModel):
status: str = "Failed"
message: Optional[str]
data: Optional[Any]
class DefaultFailureResponse(DefaultResponse):
error: Any
import time
from typing import Dict, Optional, List, Any, Union
from pydantic import BaseModel, Field
class IntervalSettings(BaseModel):
    # One time
    # default_factory so timestamps are computed per instance rather than once at import
    trigger_date_time: int = Field(default_factory=lambda: int(time.time() * 1000))
    # Recurring
    # # Every n minutes
    from_date: int = Field(default_factory=lambda: int(time.time() * 1000))
    to_date: int = Field(default_factory=lambda: int(time.time() * 1000))
daily_start_time: Optional[int]
daily_end_time: Optional[int]
configure_daily_time_range: Optional[bool] = False
minute: Optional[int]
# # Every n hour
hour: Optional[int]
# # Daily
selected_interval: Optional[str]
trigger_time: Optional[int]
# # Weekly
selected_week_days: Optional[List]
# # Monthly
selected_months: Optional[List]
date_enabled: Optional[bool] = True
days_enabled: Optional[bool] = False
selected_date: Optional[str]
run_on_day: Optional[str]
run_on_occurrence: Optional[str]
class APIProperties(BaseModel):
api_url: str
api_method: str
payload: Optional[Dict]
class FunctionProperties(BaseModel):
class_name: str
func_name: str
args: Optional[Any]
kwargs: Optional[Dict]
class JobDetails(BaseModel):
execution_method: str
execution_properties: Optional[Union[APIProperties, FunctionProperties]]
class ScheduleProperties(BaseModel):
trigger_type: str = "onetime"
trigger_interval: str
interval_properties: IntervalSettings
class ScheduleMetadata(BaseModel):
    # default_factory so the timestamps reflect each instance's creation time
    created_on: int = Field(default_factory=lambda: int(time.time() * 1000))
    created_by: str
    last_updated: int = Field(default_factory=lambda: int(time.time() * 1000))
    last_updated_by: str
desc: str
class ScheduleJobRequest(BaseModel):
user_id: Optional[str] = "system"
schedule_properties: Optional[ScheduleProperties]
job_details: Optional[JobDetails]
job_type: str
scheduler_type: str = "server"
project_id: str
hierarchy: Optional[str]
    schedule_id: Optional[str] = None
    advanced_schedule_properties: Optional[Dict]
schedule_type: Optional[str]
class SchedulesFetchRequest(BaseModel):
filters: Optional[Dict]
class ScheduledIDFetchRequest(SchedulesFetchRequest):
pass
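# Illustrative minimal request body for ScheduleJobRequest (all values hypothetical):
#     {
#         "job_type": "api",
#         "project_id": "project_100",
#         "schedule_properties": {
#             "trigger_type": "onetime",
#             "trigger_interval": "once",
#             "interval_properties": {"trigger_date_time": 1700000000000}
#         },
#         "job_details": {
#             "execution_method": "api",
#             "execution_properties": {"api_url": "http://example.com/ping", "api_method": "GET"}
#         }
#     }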
from fastapi import APIRouter
from pydantic import ValidationError
from scripts.constants.app_constants import APIEndpoints, APIJobConstants
from scripts.core.handlers.scheduler_handler import SchedulerHandler
from scripts.logging.logging import logger
from scripts.schemas.response_models import DefaultFailureResponse, DefaultResponse
from scripts.schemas.scheduler_schema import (
ScheduledIDFetchRequest,
ScheduleJobRequest,
SchedulesFetchRequest,
)
scheduler_handler = SchedulerHandler()
scheduler_router = APIRouter(prefix=APIEndpoints.scheduler_base)
health_check = APIRouter()
@health_check.get("/api/iLens-schedular/healthcheck")
async def ping():
return {"status": 200}
@scheduler_router.post(APIEndpoints.schedule, tags=["scheduler"])
async def schedule(request_data: ScheduleJobRequest):
try:
response_json = scheduler_handler.create_scheduled_job(request_data)
return DefaultResponse(status="success", data=response_json)
except ValidationError as e:
logger.error(f"Request data model validation failed: {e.json()}")
return DefaultFailureResponse(
status="failed",
message=APIJobConstants.validation_failed,
error=e.json(),
)
    except Exception as e:
        logger.error(f"Faced a problem when scheduling the job: {e}")
        return DefaultFailureResponse(
            status="failed",
            # scheduling_failure, not validation_failed, for non-validation errors
            message=APIJobConstants.scheduling_failure,
            error=str(e),  # raw exceptions are not JSON-serializable
        )
@scheduler_router.post(APIEndpoints.fetch_schedule_id, tags=["scheduler"])
async def fetch_schedule_id(request_data: ScheduledIDFetchRequest):
try:
response_json = await scheduler_handler.fetch_schedule_id(request_data)
return DefaultResponse(status="success", data=response_json)
except ValidationError as e:
logger.error(f"Request data model validation failed: {e.json()}")
return DefaultFailureResponse(
status="failed",
message=APIJobConstants.validation_failed,
error=e.json(),
)
    except Exception as e:
        logger.error(f"Faced a problem when fetching the schedule ID: {e}")
        return DefaultFailureResponse(
            status="failed",
            message=APIJobConstants.scheduling_failure,
            error=str(e),
        )
@scheduler_router.post(APIEndpoints.fetch_schedules, tags=["scheduler"])
async def fetch_schedules(request_data: SchedulesFetchRequest):
try:
response_json = await scheduler_handler.fetch_all_schedule(request_data)
return DefaultResponse(status="success", data=response_json)
except ValidationError as e:
logger.error(f"Request data model validation failed: {e.json()}")
return DefaultFailureResponse(
status="failed",
message=APIJobConstants.validation_failed,
error=e.json(),
)
    except Exception as e:
        logger.error(f"Faced a problem when fetching the schedules: {e}")
        return DefaultFailureResponse(
            status="failed",
            message=APIJobConstants.scheduling_failure,
            error=str(e),
        )
@scheduler_router.get(APIEndpoints.fetch_schedule_details, tags=["scheduler"])
async def fetch_schedule_details(schedule_id: str):
try:
response_json = await scheduler_handler.fetch_schedule_details(
schedule_id=schedule_id
)
return DefaultResponse(status="success", data=response_json)
except ValidationError as e:
logger.error(f"Request data model validation failed: {e.json()}")
return DefaultFailureResponse(
status="failed",
message=APIJobConstants.validation_failed,
error=e.json(),
)
    except Exception as e:
        logger.error(f"Faced a problem when fetching the schedule details: {e}")
        return DefaultFailureResponse(
            status="failed",
            message=APIJobConstants.scheduling_failure,
            error=str(e),
        )
@scheduler_router.get(APIEndpoints.fetch_schedule_details_table, tags=["scheduler"])
async def fetch_schedule_table(project_id: str):
try:
response_json = await scheduler_handler.fetch_schedule_table(
project_id=project_id
)
return DefaultResponse(
status="success",
data=response_json,
message="Data Fetched Successfully",
)
except ValidationError as e:
logger.error(f"Request data model validation failed: {e.json()}")
return DefaultFailureResponse(
status="failed",
message=APIJobConstants.validation_failed,
error=e.json(),
)
    except Exception as e:
        logger.error(f"Faced a problem when fetching the schedule table: {e}")
        return DefaultFailureResponse(
            status="failed",
            message=APIJobConstants.scheduling_failure,
            error=str(e),
        )
@scheduler_router.get(APIEndpoints.delete_schedule, tags=["scheduler"])
async def delete_schedule(schedule_id: str):  # renamed: previously shadowed fetch_schedule_details above
try:
response_json = await scheduler_handler.delete_schedule(schedule_id=schedule_id)
return DefaultResponse(
status="success",
data=response_json,
message="Job Deleted Successfully",
)
except ValidationError as e:
logger.error(f"Request data model validation failed: {e.json()}")
return DefaultFailureResponse(
status="failed",
message=APIJobConstants.validation_failed,
error=e.json(),
)
    except Exception as e:
        logger.error(f"Faced a problem when deleting the schedule: {e}")
        return DefaultFailureResponse(
            status="failed",
            message=APIJobConstants.scheduling_failure,
            error=str(e),
        )
from scripts.constants.app_constants import Secrets
from scripts.logging.logging import logger
from scripts.utils.security_utils.apply_encryption_utility import create_token
def create_login_token(host: str = '127.0.0.1', user_id=None, internal_token=Secrets.token, project_id=None):
"""
    This method creates a login token; the returned value is used as the session cookie.
"""
try:
if user_id is None or user_id == "system":
user_id = "user_099"
return create_token(
user_id=user_id,
ip=host,
token=internal_token,
project_id=project_id
)
except Exception as e:
logger.exception(str(e))
raise
"""
Mongo Utility
Author: Irfanuddin Shafi Ahmed
Reference: Pymongo Documentation
"""
import sys
from typing import Dict, List, Optional
from pymongo import MongoClient
class MongoCollectionClass:
@property
def key_mongo_default_id(self):
return "_id"
class MongoException(Exception):
pass
class MongoConnect:
def __init__(self, uri):
try:
self.client = MongoClient(uri, connect=False)
except Exception as e:
print(e)
sys.exit(1)
def insert_one(self, database_name: str, collection_name: str, data: Dict):
"""
        Insert a document into a collection in a Mongo database.
:param database_name: Database Name
:param collection_name: Collection Name
:param data: Data to be inserted
:return: Insert ID
"""
try:
db = self.client[database_name]
collection = db[collection_name]
response = collection.insert_one(data)
return response.inserted_id
except Exception as e:
raise MongoException(e)
def insert_many(
self, database_name: str, collection_name: str, data: List
):
"""
        Insert documents into a collection in a Mongo database.
:param database_name: Database Name
:param collection_name: Collection Name
:param data: List of Data to be inserted
:return: Insert IDs
"""
try:
db = self.client[database_name]
collection = db[collection_name]
response = collection.insert_many(data)
return response.inserted_ids
except Exception as e:
raise MongoException(e)
def find(
self,
database_name: str,
collection_name: str,
query: Dict,
filter_dict: Optional[Dict] = None,
sort=None,
skip: Optional[int] = 0,
limit: Optional[int] = None,
):
"""
The function is used to query documents from a given collection in a Mongo Database
:param database_name: Database Name
:param collection_name: Collection Name
:param query: Query Dictionary
:param filter_dict: Filter Dictionary
:param sort: List of tuple with key and direction. [(key, -1), ...]
:param skip: Skip Number
:param limit: Limit Number
:return: List of Documents
"""
if sort is None:
sort = list()
if filter_dict is None:
filter_dict = {"_id": 0}
try:
db = self.client[database_name]
collection = db[collection_name]
if len(sort) > 0:
cursor = (
collection.find(query, filter_dict).sort(sort).skip(skip)
)
else:
cursor = collection.find(query, filter_dict).skip(skip)
if limit:
cursor = cursor.limit(limit)
response = list(cursor)
cursor.close()
return response
except Exception as e:
raise MongoException(e)
def find_one(
self,
database_name: str,
collection_name: str,
query: Dict,
filter_dict: Optional[Dict] = None,
):
try:
if filter_dict is None:
filter_dict = {"_id": 0}
db = self.client[database_name]
collection = db[collection_name]
response = collection.find_one(query, filter_dict)
return response
except Exception as e:
raise MongoException(e)
def update_one(
self,
database_name: str,
collection_name: str,
query: Dict,
data: Dict,
upsert: bool = False,
):
"""
:param upsert:
:param database_name:
:param collection_name:
:param query:
:param data:
:return:
"""
try:
db = self.client[database_name]
collection = db[collection_name]
response = collection.update_one(
query, {"$set": data}, upsert=upsert
)
return response.modified_count
except Exception as e:
raise MongoException(e)
def delete_many(
self, database_name: str, collection_name: str, query: Dict
):
"""
:param database_name:
:param collection_name:
:param query:
:return:
"""
try:
db = self.client[database_name]
collection = db[collection_name]
response = collection.delete_many(query)
return response.deleted_count
except Exception as e:
raise MongoException(e)
def delete_one(
self, database_name: str, collection_name: str, query: Dict
):
"""
:param database_name:
:param collection_name:
:param query:
:return:
"""
try:
db = self.client[database_name]
collection = db[collection_name]
response = collection.delete_one(query)
return response.deleted_count
except Exception as e:
raise MongoException(e)
def distinct(
self,
database_name: str,
collection_name: str,
query_key: str,
filter_json: Optional[Dict] = None,
):
"""
:param database_name:
:param collection_name:
:param query_key:
:param filter_json:
:return:
"""
try:
db = self.client[database_name]
collection = db[collection_name]
response = collection.distinct(query_key, filter_json)
return response
except Exception as e:
raise MongoException(e)
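# Usage sketch (connection details hypothetical):
#     mongo = MongoConnect(uri="mongodb://localhost:27017")
#     mongo.insert_one("demo_db", "demo_coll", {"name": "job"})
#     docs = mongo.find("demo_db", "demo_coll", query={"name": "job"})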
import uuid
from datetime import timedelta, datetime
from scripts.constants.app_constants import Secrets
from scripts.database.redis_connections import login_db
from scripts.utils.security_utils.jwt_util import JWT
jwt = JWT()
def create_token(user_id, ip, token, age=Secrets.LOCK_OUT_TIME_MINS, login_token=None, project_id=None):
"""
    This method creates a session token: it encodes a JWT and registers it in Redis under a UID.
"""
try:
uid = login_token
if not uid:
uid = str(uuid.uuid4()).replace("-", "")
payload = {
"ip": ip,
"user_id": user_id,
"token": token,
"uid": uid,
"age": age
}
if project_id:
payload["project_id"] = project_id
exp = datetime.utcnow() + timedelta(minutes=age)
_extras = {"iss": Secrets.issuer, "exp": exp}
_payload = {**payload, **_extras}
new_token = jwt.encode(_payload)
# Add session to redis
login_db.set(uid, new_token)
login_db.expire(uid, timedelta(minutes=age))
return uid
except Exception:
raise
from secrets import compare_digest
from typing import Optional
from fastapi import HTTPException, Request, Response, status
from fastapi.openapi.models import APIKey, APIKeyIn
from fastapi.security import APIKeyCookie
from fastapi.security.api_key import APIKeyBase
from pydantic import BaseModel, Field
from scripts.constants.app_configuration import Service
from scripts.constants.app_constants import Secrets
from scripts.database.redis_connections import login_db
from scripts.logging.logging import logger
from scripts.utils.security_utils.apply_encryption_utility import create_token
from scripts.utils.security_utils.jwt_util import JWT
class CookieAuthentication(APIKeyBase):
"""
Authentication backend using a cookie.
Internally, uses a JWT token to store the data.
"""
scheme: APIKeyCookie
cookie_name: str
cookie_secure: bool
def __init__(
self,
cookie_name: str = "login-token",
):
super().__init__()
self.model: APIKey = APIKey(**{"in": APIKeyIn.cookie}, name=cookie_name)
self.scheme_name = self.__class__.__name__
self.cookie_name = cookie_name
self.scheme = APIKeyCookie(name=self.cookie_name, auto_error=False)
self.login_redis = login_db
self.jwt = JWT()
async def __call__(self, request: Request, response: Response) -> str:
cookies = request.cookies
login_token = cookies.get("login-token")
if not login_token:
login_token = request.headers.get("login-token")
if not login_token:
raise HTTPException(status_code=401)
jwt_token = self.login_redis.get(login_token)
# logger.debug(f'jwt token: {jwt_token}')
if not jwt_token:
raise HTTPException(status_code=401)
try:
decoded_token = self.jwt.validate(token=jwt_token)
if not decoded_token:
raise HTTPException(status_code=401)
except Exception as e:
logger.debug(f'Exception in decoded token: {str(e)}')
raise HTTPException(status_code=401, detail=e.args)
user_id = decoded_token.get("user_id")
project_id = decoded_token.get("project_id")
cookie_user_id = request.cookies.get(
"user_id", request.cookies.get(
"userId", request.headers.get("userId")
))
_token = decoded_token.get("token")
_age = int(decoded_token.get("age", Secrets.LOCK_OUT_TIME_MINS))
if not compare_digest(Secrets.token, _token):
raise HTTPException(status_code=401)
if login_token != decoded_token.get("uid"):
raise HTTPException(status_code=401)
if cookie_user_id and not compare_digest(user_id, cookie_user_id):
raise HTTPException(status_code=401)
try:
new_token = create_token(
user_id=user_id,
ip=request.client.host,
token=Secrets.token,
age=_age,
login_token=login_token,
project_id=project_id
)
except Exception as e:
logger.debug(f'Exception in create token: {str(e)}')
raise HTTPException(status_code=401, detail=e.args)
response.set_cookie(
'login-token',
new_token,
samesite='strict',
httponly=True,
secure=Service.secure_cookie,
max_age=Secrets.LOCK_OUT_TIME_MINS * 60,
)
        # If project ID is null, this is susceptible to a 500 status code; ensure
        # token formation includes the project ID in the login token.
        response.headers.update({"login-token": new_token,
                                 "projectId": project_id,
                                 "project_id": project_id,
                                 "userId": user_id,
                                 "user_id": user_id})
return user_id
class MetaInfoSchema(BaseModel):
projectId: Optional[str] = ""
project_id: Optional[str] = ""
user_id: Optional[str] = ""
language: Optional[str] = ""
ip_address: Optional[str] = ""
login_token: Optional[str] = Field(alias="login-token")
class Config:
allow_population_by_field_name = True
class MetaInfoCookie(APIKeyBase):
"""
Project ID backend using a cookie.
"""
scheme: APIKeyCookie
def __init__(self):
super().__init__()
self.model: APIKey = APIKey(**{"in": APIKeyIn.cookie}, name="meta")
self.scheme_name = self.__class__.__name__
def __call__(self, request: Request, response: Response):
cookies = request.cookies
cookie_json = {
"projectId": cookies.get("projectId", request.headers.get("projectId")),
"userId": cookies.get("user_id", cookies.get("userId", request.headers.get("userId"))),
"language": cookies.get("language", request.headers.get("language")),
}
return MetaInfoSchema(
project_id=cookie_json["projectId"],
user_id=cookie_json["userId"],
projectId=cookie_json["projectId"],
language=cookie_json["language"],
ip_address=request.client.host,
login_token=cookies.get("login-token"),
)
class GetUserID(APIKeyBase):
"""
Project ID backend using a cookie.
"""
scheme: APIKeyCookie
def __init__(self):
super().__init__()
self.model: APIKey = APIKey(**{"in": APIKeyIn.cookie}, name="user_id")
self.scheme_name = self.__class__.__name__
def __call__(self, request: Request, response: Response):
if user_id := request.cookies.get("user_id", request.cookies.get("userId", request.headers.get("userId"))):
return user_id
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)
import jwt
from jwt.exceptions import (
InvalidSignatureError,
ExpiredSignatureError,
MissingRequiredClaimError,
)
from scripts.constants.app_configuration import KeyPath
from scripts.constants.app_constants import Secrets
from scripts.exceptions import AuthenticationError, ErrorMessages
from scripts.logging.logging import logger
class JWT:
def __init__(self):
self.max_login_age = Secrets.LOCK_OUT_TIME_MINS
self.issuer = Secrets.issuer
self.alg = Secrets.alg
self.public = KeyPath.public
self.private = KeyPath.private
    def encode(self, payload):
        try:
            logger.debug('Inside encode')
            # The "with" block closes the file; the original "finally: f.close()"
            # was redundant and raised NameError when open() itself failed.
            with open(self.private, "r") as f:
                key = f.read()
            return jwt.encode(payload, key, algorithm=self.alg)
        except Exception as e:
            logger.debug(f'Exception in encode: {str(e)}')
            raise

    def validate(self, token):
        try:
            logger.debug('Inside validate')
            with open(self.public, "r") as f:
                key = f.read()
            payload = jwt.decode(
                token,
                key,
                algorithms=[self.alg],  # PyJWT expects a list of allowed algorithms
                leeway=Secrets.leeway_in_mins * 60,  # PyJWT leeway is in seconds
                options={"require": ["exp", "iss"]},
            )
            return payload
        except InvalidSignatureError:
            raise AuthenticationError(ErrorMessages.ERROR003)
        except ExpiredSignatureError:
            raise AuthenticationError(ErrorMessages.ERROR002)
        except MissingRequiredClaimError:
            raise AuthenticationError(ErrorMessages.ERROR002)
        except Exception as e:
            logger.debug(f'Exception in validate: {str(e)}')
            raise
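# Round-trip sketch (assumes the RSA key pair under assets/keys is present;
# values hypothetical):
#     jwt_util = JWT()
#     token = jwt_util.encode({"user_id": "user_099", "iss": Secrets.issuer,
#                              "exp": datetime.utcnow() + timedelta(minutes=30)})
#     payload = jwt_util.validate(token)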
import logging
from datetime import timedelta, datetime, timezone
from functools import lru_cache, wraps
import orjson as json
from fastapi import HTTPException, Request, status
from scripts.database.mongo.ilens_configuration.collections.user import User
from scripts.database.mongo.ilens_configuration.collections.user_project import UserProject
from scripts.database.redis_connections import user_role_permissions_redis
def timed_lru_cache(seconds: int = 10, maxsize: int = 128):
def wrapper_cache(func):
func = lru_cache(maxsize=maxsize)(func)
func.lifetime = timedelta(seconds=seconds)
func.expiration = datetime.now(timezone.utc) + func.lifetime
@wraps(func)
def wrapped_func(*args, **kwargs):
if datetime.now(timezone.utc) >= func.expiration:
logging.debug("Cache Expired")
func.cache_clear()
func.expiration = datetime.now(timezone.utc) + func.lifetime
return func(*args, **kwargs)
return wrapped_func
return wrapper_cache
@timed_lru_cache(seconds=60, maxsize=1000)
def get_user_role_id(user_id, project_id):
logging.debug("Fetching user role from DB")
user_conn = User() # user collection from ilens_configuration DB
if user_role := user_conn.find_user_role_for_user_id(user_id=user_id, project_id=project_id):
return user_role["userrole"][0]
# if user not found in primary collection, check if user is in project collection
user_proj_conn = UserProject() # user_project collection from ilens_configuration DB
if user_role := user_proj_conn.find_user_role_for_user_id(user_id=user_id, project_id=project_id):
return user_role["userrole"][0]
class RBAC:
def __init__(self, entity_name: str, operation: list[str]):
self.entity_name = entity_name
self.operation = operation
def check_permissions(self, user_id: str, project_id: str) -> dict[str, bool]:
user_role_id = get_user_role_id(user_id, project_id)
if not user_role_id:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User role not found!")
r_key = f"{project_id}__{user_role_id}" # eg: project_100__user_role_100
user_role_rec = user_role_permissions_redis.hget(r_key, self.entity_name)
if not user_role_rec:
return {} # TODO: raise exception here
user_role_rec = json.loads(user_role_rec)
if permission_dict := {i: True for i in self.operation if user_role_rec.get(i)}:
return permission_dict
else:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Insufficient Permission!")
def __call__(self, request: Request) -> dict[str, bool]:
user_id = request.cookies.get("userId", request.headers.get("userId"))
project_id = request.cookies.get("projectId", request.headers.get("projectId"))
return self.check_permissions(user_id=user_id, project_id=project_id)
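# Usage sketch: RBAC is a callable dependency, so a route can enforce permissions
# with e.g. (entity and operation names hypothetical):
#     @router.get("/jobs", dependencies=[Depends(RBAC("scheduler", ["view"]))])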
deployment:
environmentVar:
- name: MODULE_NAME
value: "main"
- name: PORT
value: "28595"
- name: MONGO_URI
valueFrom:
secretKeyRef:
name: mongo-creds
key: MONGO_URI
- name: APP_ENV
value: prod
- name: APP_NAME
value: "ilens_scheduler"
- name: SCHEDULER_THREAD
value: "120"
- name: SCHEDULER_PROCESS
value: "20"
- name: MAX_INSTANCE
value: "200"
- name: MISFIRE_SEC
value: "180"
- name: BASE_PATH
value: "/code/data"
- name: MOUNT_DIR
value: "/ilens_scheduler"
- name: REDIS_URI
value: "redis://redis-db-service.ilens-infra:6379"
- name: SECURE_ACCESS
value: "True"
- name: CORS_URLS
value: "https://qa.ilens.io,https://staging.ilens.io"
- name: SW_DOCS_URL
value: "/docs"
- name: SW_OPENAPI_URL
value: "/openapi.json"
- name: ENABLE_CORS
value: "True"
- name: SECURE_COOKIE
value: "True"
- name: VERIFY_SIGNATURE
value: "True"
- name: PROTECTED_HOSTS
value: "*.unifytwin.com,*.ilens.io"