Commit 6d69a778 authored by harshavardhan.c

Completed one version of the services for category configuration for Support Lens

[service]
name = ilens
host= 0.0.0.0
port=8585
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
apply_processor_count = False
workers = 3
threads = 6
ip_check = False
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= 192.168.0.220
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[kairos_db]
url= http://ilens_kairos:8080
[system_login]
domain_list= ["knowledgelens.com"]
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[schedule_rule_engine]
url= http://localhost:9997/create_job
[profile_pic_path]
base_path= templates/profile_pic/
[email_default_baseurl]
base_url= https://app.ilens.io/cloud/send_mail
[ai_rules]
enabled= true
docker_endpoint= tcp://localhost:4243
[flow_model]
container_url= http://localhost:8180
[IMAGE]
path= images/
[LICENSE_PATH]
path= license/
[pipeline_internal]
mqtt_broker_host= 192.168.0.220
mqtt_broker_port= 1883
mqtt_broker_ssl= false
mqtt_broker_conn_type = tcp
mqtt_broker_ws_port= 8083
node_intermediate = mqtt
queue_host= 192.168.0.220
queue_port= 9092
[channel_pp_debug_node]
host= 192.168.0.220
port= 1883
topic = ilens/pipeline/debug
user_name =
password =
[AGENT]
manager_url = http://192.168.0.220/ilens_api
[REDIS]
host= 192.168.0.220
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[ILENS_VERSION]
version = v5.1
[DATA_PROCESSOR]
host = localhost
connection = tcp
port = 1883
topic = ilens/monitor/live/device_monitor_alarms
[LICENSE_SERVER]
host = 192.168.0.220
port = 9816
[service]
name = ilens
host= 0.0.0.0
port=8585
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
workers = 1
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= #MONGO_HOST#
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[kairos_db]
url= http://#KAIROS_CASSANDRA_HOST#:8080
[system_login]
domain_list= ["knowledgelens.com"]
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[schedule_rule_engine]
url= http://localhost:9997/create_job
[profile_pic_path]
base_path= templates/profile_pic/
[email_default_baseurl]
base_url= https://app.ilens.io/cloud/send_mail
[ai_rules]
enabled= true
docker_endpoint= tcp://localhost:4243
[flow_model]
container_url= http://localhost:8180
[IMAGE]
path= /images/metaservice/
[LICENSE_PATH]
path= license/
[pipeline_internal]
mqtt_broker_host= #MQTT_HOST#
mqtt_broker_port= 1883
mqtt_broker_ssl= false
mqtt_broker_conn_type = tcp
mqtt_broker_ws_port= 8083
node_intermediate= queue
queue_host= #KAFKA_HOST#
queue_port= 9092
[channel_pp_debug_node]
host= #CHANNEL_PP#
port= 1883
topic = ilens/pipeline/debug
user_name =
password =
[AGENT]
manager_url = http://#AGENT_URL#/ilens_api
[REDIS]
host= #REDIS_HOST#
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[ILENS_VERSION]
version = v5.1
[DATA_PROCESSOR]
host = #DATA_PROCESSOR_HOST#
connection = tcp
port = 1883
topic = ilens/monitor/live/device_monitor_alarms
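# A minimal deploy-time substitution sketch for the #TOKEN# placeholders in the template
# conf above; the token-to-value mapping shown here is illustrative, not part of the commit.
from pathlib import Path

replacements = {"#MONGO_HOST#": "192.168.0.220", "#KAIROS_CASSANDRA_HOST#": "ilens_kairos"}
conf_path = Path("conf/app/application.conf")
text = conf_path.read_text()
for token, value in replacements.items():
    text = text.replace(token, value)
conf_path.write_text(text)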
[service]
name = ilens
host= 0.0.0.0
port=8585
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
apply_processor_count = False
workers = 3
threads = 6
ip_check = False
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= 192.168.0.220
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[kairos_db]
url= http://ilens_kairos:8080
[system_login]
domain_list= ["knowledgelens.com"]
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[schedule_rule_engine]
url= http://localhost:9997/create_job
[profile_pic_path]
base_path= templates/profile_pic/
[email_default_baseurl]
base_url= https://app.ilens.io/cloud/send_mail
[ai_rules]
enabled= true
docker_endpoint= tcp://localhost:4243
[flow_model]
container_url= http://localhost:8180
[IMAGE]
path= images/
[LICENSE_PATH]
path= license/
[pipeline_internal]
mqtt_broker_host= beta.ilens.io
mqtt_broker_port= 1883
mqtt_broker_ssl= false
mqtt_broker_conn_type = tcp
mqtt_broker_ws_port= 8083
node_intermediate= mqtt
queue_host= 192.168.0.220
queue_port= 9092
[channel_pp_debug_node]
host= 192.168.0.220
port= 1883
topic = ilens/pipeline/debug
user_name =
password =
[AGENT]
manager_url = http://192.168.0.220/ilens_api
[REDIS]
host= localhost
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[ILENS_VERSION]
version = v5.1
[DATA_PROCESSOR]
host = localhost
connection = tcp
port = 1883
topic = ilens/monitor/live/device_monitor_alarms
[LICENSE_SERVER]
host = 192.168.0.220
port = 9816
{
  "encrypt_collection_dict": {
    "user": {
      "encrypt_keys": ["phonenumber"],
      "exclude_encryption": ["_id", "user_id", "client_id", "deleted_by", "created_by", "username", "isdeleted", "email"]
    }
  }
}
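# Illustrative effect of the configuration above (see MongoDataEncryption later in this
# commit): "phonenumber" in the "user" collection is stored encrypted, while the keys
# listed under "exclude_encryption" stay in plaintext. The stored shape sketched below is
# an assumption drawn from encrypt_dict_data further down.
plain_doc = {"user_id": "u100", "phonenumber": "9999999999"}
# stored form (roughly):
# {"user_id": "u100",
#  "phonenumber": {"d": b"<ciphertext>", "t": b"c3Ry"},   # 't' = base64 of the type name ("str")
#  "product_encrypted": True,
#  "encryption_salt": {...}}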
[service]
name = ilens
host= 0.0.0.0
port=8585
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
apply_processor_count = False
workers = 3
threads = 2
ip_check = False
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= 192.168.0.220
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[kairos_db]
url= http://ilens_kairos:8080
[system_login]
domain_list= ["knowledgelens.com"]
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[schedule_rule_engine]
url= http://localhost:9997/create_job
[profile_pic_path]
base_path= templates/profile_pic/
[email_default_baseurl]
base_url= https://app.ilens.io/cloud/send_mail
[ai_rules]
enabled= true
docker_endpoint= tcp://localhost:4243
[flow_model]
container_url= http://localhost:8180
[IMAGE]
path= images/
[LICENSE_PATH]
path= license/
[pipeline_internal]
mqtt_broker_host= beta.ilens.io
mqtt_broker_port= 1883
mqtt_broker_ssl= false
mqtt_broker_conn_type = tcp
mqtt_broker_ws_port= 8083
node_intermediate= queue
queue_host= 192.168.0.210
queue_port= 9092
[channel_pp_debug_node]
host= 192.168.0.220
port= 1883
topic = ilens/pipeline/debug
user_name =
password =
[AGENT]
manager_url = http://192.168.0.220/ilens_api
[REDIS]
host= localhost
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[ILENS_VERSION]
version = v5.1
[DATA_PROCESSOR]
host = localhost
connection = tcp
port = 1883
topic = ilens/monitor/live/device_monitor_alarms
[service]
name = ilens
host= 0.0.0.0
port=9090
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
apply_processor_count = False
workers = 5
threads = 6
ip_check = False
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= 192.168.0.207
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[REDIS]
host= 192.168.0.207
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[service]
name = ilens
host= 0.0.0.0
port=8585
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
apply_processor_count = False
workers = 5
threads = 6
ip_check = False
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= 10.0.0.5
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[kairos_db]
url= http://10.0.0.5:8080
[system_login]
domain_list= ["knowledgelens.com"]
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[schedule_rule_engine]
url= http://localhost:9997/create_job
[profile_pic_path]
base_path= templates/profile_pic/
[email_default_baseurl]
base_url= https://app.ilens.io/cloud/send_mail
[ai_rules]
enabled= true
docker_endpoint= tcp://localhost:4243
[flow_model]
container_url= http://localhost:8180
[IMAGE]
path= /images/metaservices/
[LICENSE_PATH]
path= license/
[pipeline_internal]
mqtt_broker_host= 10.0.0.5
mqtt_broker_port= 1883
mqtt_broker_ssl= false
mqtt_broker_conn_type = tcp
mqtt_broker_ws_port= 8083
node_intermediate= queue
queue_host= 10.0.0.5
queue_port= 9092
[channel_pp_debug_node]
host= 10.0.0.5
port= 1883
topic = ilens/pipeline/debug
user_name =
password =
[AGENT]
manager_url = http://10.0.0.5/ilens_api
[REDIS]
host= 172.31.47.241
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[ILENS_VERSION]
version = v5.1
[DATA_PROCESSOR]
host = 172.31.47.241
connection = tcp
port = 1883
topic = ilens/monitor/live/device_monitor_alarms
[LICENSE_SERVER]
host = 10.0.0.5
port = 9816
# This is a sample Python script.
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.


def print_hi(name):
    # Use a breakpoint in the code line below to debug your script.
    print(f'Hi, {name}')  # Press Ctrl+F8 to toggle the breakpoint.


# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    print_hi('PyCharm')

# See PyCharm help at https://www.jetbrains.com/help/pycharm/
import configparser
import json
import os
# Read the config file based on the already-set environment variable APP_ENV.
# If APP_ENV is not set, the default configuration file is used.
# A new application config file should be placed in a folder with the same name as the environment variable.
APP_ENV = os.environ.get('APP_ENV')
if not APP_ENV:
    APP_ENV = 'app'
config = configparser.ConfigParser()
CONFIGURATION_FILE = f"conf/{APP_ENV}/application.conf"
if not config.read(CONFIGURATION_FILE):
    APP_ENV = "dev"
    CONFIGURATION_FILE = f"conf/{APP_ENV}/application.conf"
    config.read(CONFIGURATION_FILE)
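
# Usage sketch for the loader above: with APP_ENV=dev exported, it resolves
# conf/dev/application.conf; with APP_ENV unset it tries conf/app/ first and falls back
# to conf/dev/. Example reads against the [service] section shown earlier in this commit:
example_port = config.getint("service", "port")                      # 8585 in the confs above
example_security = config.getboolean("service", "enable_security")   # False in the confs above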
"""
Configuration module
"""
"""
Service Info
"""
SERVICE_NAME = config["service"]["name"]
SERVICE_HOST = config["service"]["host"]
SERVICE_PORT = config.getint("service", "port")
SERVICE_SECRET_KEY = config["service"]["SECRET_KEY"]
SERVICE_ENABLE_SECURITY = config.getboolean("service", "enable_security", fallback=False)
SERVICE_INTERFACE = str(config["service"]["interface"])
DEVICE_STATUS_TIME = config.getint("service", "device_status_time_minutes", fallback=1)
APPLY_PROCESSOR_COUNT = config.getboolean("service", "apply_processor_count", fallback=False)
workers = config.getint("service", "workers", fallback=1)
threads = config.getint("service", "threads", fallback=1)
ip_check = config.getboolean("service", "ip_check", fallback=False)
cookie_max_age = config.getint("service", "cookie_max_age_in_mins", fallback=60)
max_attempts = config.getint("service", "max_attempts")
lockout_time_interval = config.getint("service", "lockout_time_interval")
reset_time_interval = config.getfloat("service", "reset_time_interval")
http_flag = config.getboolean('service', 'httpflag')
secure_flag = config.getboolean('service', 'secureflag')
"""
Log Info
"""
LOG_FILE_NAME = config["log"]["file_name"]
LOG_PATH = config["log"]["path"]
LOG_LEVEL = config["log"]["level"]
LOG_HANDLER = config["log"]["handler"]
LOG_MAX_BYTES = config.getint("log", "max_bytes")
LOG_BACKUP_COUNT = config.getint("log", "back_up_count")
"""
Mongo Info
"""
MONGO_HOST = config["mongo_db"]["host"]
MONGO_PORT = config.getint("mongo_db", "port", fallback=2717)
MONGO_USERNAME = config["mongo_db"]["username"]
MONGO_PASSWORD = config["mongo_db"]["password"]
MONGO_AUTHSOURCE = config["mongo_db"]["authSource"]
MONGO_AUTHMECHANISM = config["mongo_db"]["authMechanism"]
encryption_constants_file_path = config["mongo_db"]["mongo_constants_file_path"]
"""
Kairos Info
"""
KAIROS_DB_URL = config["kairos_db"]["url"]
"""
Additional info
"""
SYSTEM_LOGIN_DOMAIN_DOMAIN_LIST = json.loads(config["system_login"]["domain_list"])
CSV_PATH = config["csv_conf"]["csv_path"]
UPLOAD_CSV_PATH = config["csv_conf"]["upload_path"]
PROFILE_PIC_PATH = config["profile_pic_path"]["base_path"]
"""
Manual Entry conf
"""
SCHEDULER_BASE_URL = config["schedule_rule_engine"]["url"]
"""
Upload Parser
"""
# upload_parser = config["upload_parser"]
"""
Default email sender
"""
default_email_url = config["email_default_baseurl"]["base_url"]
"""
AI Rule : Auto Container Deployment Service
"""
AI_RULE_STATUS = config["ai_rules"]["enabled"]
if AI_RULE_STATUS.lower() == "true":
    AI_RULE_DOCKER_ENDPOINT = config["ai_rules"]["docker_endpoint"]
else:
    AI_RULE_DOCKER_ENDPOINT = "localhost"
CONTAINER_URL = config["flow_model"]["container_url"]
IMAGE_PATH = config.get('IMAGE', 'path', fallback="images/")
LICENSE_PATH = config.get('LICENSE_PATH', 'path', fallback="license/")
PIPELINE_INTERNAL_SECTION = 'pipeline_internal'
PIPELINE_INTERNAL_CONFIGURATION = {
    "mqtt_broker_host": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_host', fallback='localhost'),
    "mqtt_broker_port": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_port', fallback='1883'),
    "mqtt_broker_ssl": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_ssl', fallback='false'),
    "mqtt_broker_conn_type": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_conn_type', fallback='tcp'),
    "mqtt_broker_ws_port": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_ws_port', fallback='8083'),
    "kafka_broker": config.get(PIPELINE_INTERNAL_SECTION, 'queue_host', fallback='localhost'),
    "kafka_port": config.get(PIPELINE_INTERNAL_SECTION, 'queue_port', fallback='9092'),
    "broker_ssl_path": config.get(PIPELINE_INTERNAL_SECTION, 'broker_ssl_path', fallback='/opt/ssl'),
    "node_intermediate": 'kafka' if config.get(PIPELINE_INTERNAL_SECTION, 'node_intermediate',
                                               fallback='kafka') == 'queue' else 'mqtt'
}
CHANNEL_PIPELINE_CONFIG = config['channel_pp_debug_node']
MQTT_BROKER_HOST = config['pipeline_internal']["mqtt_broker_host"]
MQTT_BROKER_PORT = config['pipeline_internal']["mqtt_broker_port"]
MQTT_BROKER_SSL = config['pipeline_internal']["mqtt_broker_ssl"]
MQTT_BROKER_CONN_TYPE = config['pipeline_internal']["mqtt_broker_conn_type"]
MQTT_BROKER_WS_PORT = config['pipeline_internal']["mqtt_broker_ws_port"]
BROKER_SSL_PATH = PIPELINE_INTERNAL_CONFIGURATION.get("broker_ssl_path", '/opt/ssl')
MANAGER_URL = config["AGENT"]["manager_url"]
DEVICE_MODE = config.get('AGENT', 'device_mode', fallback='agent')
SSL_CERT_PATH = config.get('AGENT', 'ssl_base_path', fallback='/opt/ssl')
TIME_SYNC_URL = config.get('AGENT', 'time_sync_url', fallback=None)
TIME_SYNC_URL_CERT_PATH = config.get('AGENT', 'cert_path', fallback=False)
# Redis Details
redis_host = str(config["REDIS"]["host"])
redis_port = int(config["REDIS"]["port"])
key_expiry_in_secs = int(config["REDIS"]["key_expiry"])
rules_redis_db = int(config["REDIS"]["rules_db"])
alarms_redis_db = int(config["REDIS"]["alarms_db"])
live_tags_db = int(config["REDIS"]["live_tags_db"])
audit_db = int(config["REDIS"]["audit_db"])
audit_queue = str(config["REDIS"]["audit_queue"])
# iLens version
version = str(config["ILENS_VERSION"]["version"])
# Data Processor configs
data_processor_host = config.get('DATA_PROCESSOR', 'host', fallback=None)
data_processor_conn_type = config.get('DATA_PROCESSOR', 'connection', fallback='tcp')
data_processor_ssl = config.getboolean('DATA_PROCESSOR', 'ssl_enabled', fallback=False)
data_processor_port = config.getint('DATA_PROCESSOR', 'port', fallback=1883)
data_processor_topic = config.get('DATA_PROCESSOR', 'topic', fallback=None)
LICENSE_SERVER = config.get('LICENSE_SERVER', 'host', fallback="localhost")
LICENSE_PORT = config.getint('LICENSE_SERVER', 'port', fallback=9816)
class Endpoints:
    base_url = "/support_lens"
    category_list = base_url + "/category/list"
    category_save = base_url + "/category/save"
    category_edit = base_url + "/category/edit"
    category_fetch = base_url + "/category/fetch"
    # Assumed route: the delete service below needs an endpoint distinct from category_fetch.
    category_delete = base_url + "/category/delete"


class DBMapping:
    # DBConstants
    support_lens_configuration = "supportlens_configuration"
    # CollectionConstants
    category_configuration = "category_configuration"
    subcategory_configuration = "subcategory_configuration"


class StatusMessages:
    SUCCESS = "success"
    FAILED = "failed"
    CATEGORY_LIST = "Failed to fetch category list"
    CATEGORY_DATA = "Failed to save category data"
    CATEGORY_DELETE = "Failed to delete category data"
    CATEGORY_FETCH = "Failed to fetch category data"


class StaticJsons:
    CATEGORY_HEADERCONTENT = [
        {
            "label": "Category Name",
            "key": "category_name"
        },
        {
            "label": "Description",
            "key": "description"
        },
        {
            "label": "Sub Categories",
            "key": "sub_categories"
        }
    ]
class ConnectionObj:
    def __init__(self):
        self.mongo_connection_obj = None


ConnectionObj = ConnectionObj()
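
# Intended use of the ConnectionObj singleton above (it mirrors what
# scripts/utils/get_new_id.py does later in this commit): modules share one MongoConnect
# client instead of reconnecting. Shown as comments here because a live import would be
# circular (mongo_utility itself imports this module).
# from scripts.config.db_connection_obj import ConnectionObj
# from scripts.utils.mongo_utility import MongoConnect
#
# if ConnectionObj.mongo_connection_obj is None:
#     ConnectionObj.mongo_connection_obj = MongoConnect()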
import time
from copy import deepcopy

from scripts.config.app_constants import StatusMessages, StaticJsons, DBMapping
from scripts.config.db_connection_obj import ConnectionObj
from scripts.logging.logger import logger
from scripts.utils.get_new_id import GetNewId
from scripts.utils.mongo_utility import MongoConnect


class SupportLensHandler:
    def __init__(self):
        try:
            logger.debug("Inside the Support Lens Handler module")
            self.new_id = GetNewId()
            self.mongo_obj = ConnectionObj.mongo_connection_obj
            if not ConnectionObj.mongo_connection_obj:
                self.mongo_obj = ConnectionObj.mongo_connection_obj = MongoConnect()
        except Exception as e:
            logger.exception("Exception in the data utility definition: " + str(e))

    def get_category_list(self, input_json):
        final_json = {"status": StatusMessages.FAILED,
                      "data": dict(headerContent=StaticJsons.CATEGORY_HEADERCONTENT, bodyContent=list())}
        logger.debug("Inside get_category_list definition")
        try:
            records = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.category_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'project_id': input_json["project_id"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            'category_id': '$category_id',
                            'category_name': '$CategoryName',
                            'sub_categories': '$SubCategories',
                            'description': '$Description'
                        }
                    }
                ]))
            for each_record in records:
                if "sub_categories" in each_record:
                    sub_categories = [item["sub_CategoryName"] for item in each_record["sub_categories"]]
                    each_record["sub_categories"] = ",".join(sub_categories)
            final_json["data"]["bodyContent"] = deepcopy(records)
            final_json["status"] = StatusMessages.SUCCESS
        except Exception as e:
            logger.debug("Exception occurred while fetching category list data: " + str(e))
        return final_json

    def fetch_category_data(self, input_json):
        final_json = {"status": StatusMessages.FAILED,
                      "data": dict()}
        logger.debug("Inside fetch_category_data definition")
        try:
            records = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.category_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'project_id': input_json["project_id"],
                            'category_id': input_json["category_id"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            'category_name': '$CategoryName',
                            'sub_categories': '$SubCategories',
                            'description': '$Description'
                        }
                    }
                ]))
            # The aggregation yields at most one document for a category_id match.
            record = records[0] if records else dict()
            for data in record.get("sub_categories", []):
                data['label'] = data.pop("sub_CategoryName")
            final_json["data"] = deepcopy(record)
            final_json["status"] = StatusMessages.SUCCESS
        except Exception as e:
            logger.debug("Exception occurred while fetching category data: " + str(e))
        return final_json

    def save_category_data(self, input_json):
        final_json = dict(status=StatusMessages.FAILED, message=StatusMessages.CATEGORY_DATA)
        logger.debug("Inside save_category_data definition")
        try:
            for data in input_json["sub_categories"]:
                data['sub_CategoryName'] = data.pop("label")
                if not data["sub_category_id"]:
                    sub_category_id = self.save_subCategory_data(label_name=data["sub_CategoryName"],
                                                                 project_id=input_json["project_id"])
                    if not sub_category_id:
                        return final_json
                    data["sub_category_id"] = sub_category_id
            if input_json["type"] == "save":
                insert_json = dict(CategoryName=input_json["category_name"], Description=input_json["description"],
                                   SubCategories=input_json["sub_categories"],
                                   category_id="category_" + self.new_id.get_next_id("category"),
                                   project_id=input_json["project_id"],
                                   created_on=time.time(),
                                   created_by=self.new_id.get_user_id(),
                                   last_updated_on=time.time(),
                                   last_updated_by=self.new_id.get_user_id())
                self.mongo_obj.insert_one(database_name=DBMapping.support_lens_configuration,
                                          collection_name=DBMapping.category_configuration,
                                          json_data=insert_json)
            if input_json["type"] == "edit":
                update_json = dict(CategoryName=input_json["category_name"], Description=input_json["description"],
                                   SubCategories=input_json["sub_categories"],
                                   category_id=input_json["category_id"],
                                   created_on=time.time(),
                                   project_id=input_json["project_id"],
                                   created_by=self.new_id.get_user_id(),
                                   last_updated_on=time.time(),
                                   last_updated_by=self.new_id.get_user_id())
                query_json = dict(project_id=input_json["project_id"], category_id=input_json["category_id"])
                self.mongo_obj.update_one(db_name=DBMapping.support_lens_configuration,
                                          collection_name=DBMapping.category_configuration,
                                          set_json=update_json, query=query_json, upsert=True)
            final_json = dict(status=StatusMessages.SUCCESS, message="Category saved successfully")
        except Exception as e:
            logger.debug("Exception occurred while saving category data: " + str(e))
        return final_json

    def save_subCategory_data(self, label_name, project_id):
        try:
            insert_json = dict(sub_CategoryName=label_name,
                               sub_category_id="sub_category_" + self.new_id.get_next_id("sub_category"),
                               project_id=project_id,
                               created_on=time.time(),
                               created_by=self.new_id.get_user_id())
            self.mongo_obj.insert_one(database_name=DBMapping.support_lens_configuration,
                                      collection_name=DBMapping.subcategory_configuration,
                                      json_data=insert_json)
            return insert_json["sub_category_id"]
        except Exception as e:
            logger.debug("Exception occurred while saving save_subCategory_data: " + str(e))
            return False

    def delete_category_data(self, input_json):
        final_json = dict(status=StatusMessages.FAILED, message=StatusMessages.CATEGORY_DELETE)
        try:
            query_json = dict(project_id=input_json["project_id"], category_id=input_json["category_id"])
            self.mongo_obj.delete_one_record(db_name=DBMapping.support_lens_configuration,
                                             collection_name=DBMapping.category_configuration,
                                             query_json=query_json)
            final_json = dict(status=StatusMessages.SUCCESS, message="Category deleted successfully")
        except Exception as e:
            logger.debug("Exception occurred while deleting category data: " + str(e))
        return final_json
from flask import Blueprint, request

from scripts.config.app_constants import Endpoints, StatusMessages
from scripts.core.handler.supportlens_handler import SupportLensHandler
from scripts.logging.logger import logger
from scripts.utils.AESEnc import apply_encryption

support_lens_blueprint = Blueprint("support_lens_blueprint", __name__)
handler_obj = SupportLensHandler()


@support_lens_blueprint.route(Endpoints.category_list, methods=['POST'])
@apply_encryption
def get_category_list():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_LIST}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.get_category_list(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@support_lens_blueprint.route(Endpoints.category_save, methods=['POST'])
@apply_encryption
def save_category_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_DATA}
    try:
        input_data = request.data
        # input_data = request.get_json()
        input_data.update(type="save")
        final_json = handler_obj.save_category_data(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@support_lens_blueprint.route(Endpoints.category_edit, methods=['POST'])
@apply_encryption
def edit_category_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_DATA}
    try:
        input_data = request.data
        # input_data = request.get_json()
        input_data.update(type="edit")
        final_json = handler_obj.save_category_data(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@support_lens_blueprint.route(Endpoints.category_fetch, methods=['POST'])
@apply_encryption
def fetch_category_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_FETCH}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.fetch_category_data(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


# A distinct category_delete endpoint is assumed here (see Endpoints in app_constants);
# Flask would reject a second registration of the category_fetch route and function name.
@support_lens_blueprint.route(Endpoints.category_delete, methods=['POST'])
@apply_encryption
def delete_category_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_DELETE}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.delete_category_data(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json
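
# Wiring sketch for the blueprint above. The Flask app factory is not part of this
# commit, so the module path and app setup here are assumptions:
# from flask import Flask
# from scripts.services.supportlens_services import support_lens_blueprint
#
# app = Flask(__name__)
# app.register_blueprint(support_lens_blueprint)
# app.run(host="0.0.0.0", port=8585)   # host/port from [service] in application.conf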
# --- All errors from this code base belong to the code series 0001-1000 --- #
IL0001 = 'Error Code IL0001: Login timeout. Please log in again'
IL0002 = 'Error Code IL0002: iLens faced an issue while processing.' \
         ' Please contact the administrator to resolve the issue.'
IL0003 = 'Error Code IL0003: An unsupported method was requested from the server'
# --- All broader exception codes belong to the code series 1001-2000 --- #
IL1001 = ''
# --- All errors from dependent libraries belong to their respective code series --- #
# All errors in the MONGO... series are exception codes for errors related to MongoDB
MONGO001 = "Error Code MONGO001: Server was unable to establish a connection with MongoDB"
MONGO002 = "Error Code MONGO002: Server faced a problem when inserting document(s) into MongoDB"
MONGO003 = "Error Code MONGO003: Server faced a problem finding the document(s) with the given condition"
MONGO004 = "Error Code MONGO004: Server faced a problem deleting the document(s) with the given condition"
MONGO005 = "Error Code MONGO005: Server faced a problem updating the document(s) with the given condition and data"
MONGO006 = "Error Code MONGO006: Server faced a problem when aggregating the data"
MONGO007 = "Error Code MONGO007: Server faced a problem when closing the MongoDB connection"
MONGO008 = "Error Code MONGO008: Found an existing record with the same ID in MongoDB"
MONGO009 = "Error Code MONGO009: Server faced a problem when fetching distinct documents from MongoDB"
MONGO010 = "Error Code MONGO010: Server faced a problem when performing a find-and-replace in MongoDB"
MONGO011 = "Error Code MONGO011: Server faced a problem when de-serializing a MongoDB object"
# All errors in the DCK... series are exception codes for errors related to Docker
DCK001 = "Error Code DCK001: Invalid docker client identifier"
class ILensException(Exception):
    pass


class MongoException(ILensException):
    pass


class MongoConnectionException(MongoException):
    pass


class MongoQueryException(MongoException):
    pass


class MongoEncryptionException(MongoException):
    pass


class MongoRecordInsertionException(MongoQueryException):
    pass


class MongoFindException(MongoQueryException):
    pass


class MongoDeleteException(MongoQueryException):
    pass


class MongoUpdateException(MongoQueryException):
    pass


class MongoUnknownDatatypeException(MongoEncryptionException):
    pass


class MongoDistictQueryException(MongoException):
    pass


class MongoFindAndReplaceException(MongoException):
    pass


class MongoObjectDeserializationException(MongoException):
    pass


class DeviceManagerException(ILensException):
    pass


class DeviceNameAlreadyExistsException(DeviceManagerException):
    pass
import logging
import os
from logging.handlers import RotatingFileHandler

from scripts.config import app_configuration

_log_file_name__ = str(app_configuration.LOG_FILE_NAME)
__log_path__ = str(app_configuration.LOG_PATH)
__log_level__ = str(app_configuration.LOG_LEVEL)
__handler_type__ = str(app_configuration.LOG_HANDLER)
__max_bytes__ = int(app_configuration.LOG_MAX_BYTES)
__backup_count__ = int(app_configuration.LOG_BACKUP_COUNT)
complete_log_path = os.path.join(__log_path__, _log_file_name__)
if not os.path.isdir(__log_path__):
    os.makedirs(__log_path__)

# Register a custom TRACE level five steps below DEBUG.
logging.trace = logging.DEBUG - 5
logging.addLevelName(logging.DEBUG - 5, 'TRACE')


class ILensLogger(logging.getLoggerClass()):
    def __init__(self, name):
        super().__init__(name)

    def trace(self, msg, *args, **kwargs):
        if self.isEnabledFor(logging.trace):
            self._log(logging.trace, msg, args, **kwargs)


def get_logger(log_file_name=complete_log_path, log_level=__log_level__, time_format="%Y-%m-%d %H:%M:%S",
               handler_type=__handler_type__, max_bytes=__max_bytes__, backup_count=__backup_count__):
    """
    Creates a rotating log
    """
    log_file = log_file_name + '.log'
    logging.setLoggerClass(ILensLogger)
    __logger__ = logging.getLogger(log_file_name)
    __logger__.setLevel(log_level.strip().upper())
    debug_formatter = '%(asctime)s - %(levelname)-6s - %(name)s - [%(threadName)5s:%(filename)5s:' \
                      '%(funcName)5s():%(lineno)s] - %(message)s'
    formatter_string = '%(asctime)s - %(levelname)-6s - %(name)s - %(levelname)3s - %(message)s'
    if log_level.strip().upper() == "DEBUG":
        formatter_string = debug_formatter
    formatter = logging.Formatter(formatter_string, time_format)
    # if str(handler_type).lower() == "console_handler":
    # Console Handler
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(formatter)
    if __logger__.hasHandlers():
        __logger__.handlers.clear()
    # __logger__.addHandler(console_handler)
    if str(handler_type).lower() == "rotating_file_handler":
        # Rotating File Handler
        handler = RotatingFileHandler(log_file, maxBytes=max_bytes, backupCount=backup_count)
        handler.setFormatter(formatter)
        if __logger__.hasHandlers():
            __logger__.handlers.clear()
        __logger__.addHandler(handler)
    else:
        # File Handler
        hdlr_service = logging.FileHandler(log_file)
        hdlr_service.setFormatter(formatter)
        if __logger__.hasHandlers():
            __logger__.handlers.clear()
        __logger__.addHandler(hdlr_service)
    return __logger__


logger = get_logger()
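
# Usage sketch for the logger above, including the custom TRACE level registered earlier
# (TRACE messages appear only when the [log] level is set at or below TRACE):
logger.debug("support lens handler initialised")
logger.trace("verbose wire-level detail")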
import base64
import json
import uuid
import hashlib
from datetime import datetime
from functools import wraps

from flask import request, make_response, jsonify, session
from Cryptodome import Random
from Cryptodome.Cipher import AES
# from Crypto import Random
# from Crypto.Cipher import AES

from scripts.config import app_configuration as config, app_constants, app_configuration

max_age_in_mins = config.cookie_max_age


class AESCipher(object):
    """
    A classical AES cipher. Can use any size of data and any size of password thanks to padding.
    Also ensures the coherence and the type of the data with a unicode-to-byte converter.
    """

    def __init__(self, key):
        self.bs = 16
        self.key = AESCipher.str_to_bytes(key)

    @staticmethod
    def str_to_bytes(data):
        u_type = type(b''.decode('utf8'))
        if isinstance(data, u_type):
            return data.encode('utf8')
        return data

    def _pad(self, s):
        return s + (self.bs - len(s) % self.bs) * AESCipher.str_to_bytes(chr(self.bs - len(s) % self.bs))

    @staticmethod
    def _unpad(s):
        return s[:-ord(s[len(s) - 1:])]

    def encrypt(self, raw):
        raw = self._pad(AESCipher.str_to_bytes(raw))
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return base64.b64encode(iv + cipher.encrypt(raw)).decode('utf-8')

    def decrypt(self, enc):
        enc = base64.b64decode(enc)
        iv = enc[:AES.block_size]
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        data = self._unpad(cipher.decrypt(enc[AES.block_size:]))
        return data.decode('utf-8')
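
# Round-trip sketch for the AESCipher above; the 16-character key is illustrative only
# (the real code derives keys from the session id or cookie_encryption_private_key).
_demo_cipher = AESCipher("0123456789abcdef")
_token = _demo_cipher.encrypt("hello")
assert _demo_cipher.decrypt(_token) == "hello"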
def create_cookie(request_json, request, user_id=None):
    """
    This method is to create a cookie
    """
    status = False
    cookie_encoded_str = ""
    try:
        if user_id is None:
            user_id = request.cookies.get("user_id")
        client_ip = request.headers.get('X-Forwarded-For').split(',')[0].split(':')[0]
        cookie_string = (
                user_id + '^' +
                client_ip + '^' +
                request.headers.get('User-Agent') + '^' + datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ'))
        cookie_encoded_str = AESCipher(app_constants.KEYS.cookie_encryption_private_key).encrypt(cookie_string)
        return True, cookie_encoded_str
    except Exception as e:
        # traceback.print_exc()
        return status, cookie_encoded_str


def validate_cookie(cookie_enc_str, request):
    """
    This method is to validate the cookie
    """
    status = False
    encrypted_str = None
    ip_check = True
    user_id = request.cookies['user_id']
    try:
        headers = request.headers
        decrypted_string = AESCipher(app_constants.KEYS.cookie_encryption_private_key).decrypt(cookie_enc_str)
        decrypted_data = decrypted_string.split('^')
        client_ip = headers.get('X-Forwarded-For').split(',')[0].split(':')[0]
        user_agent = request.headers.get('User-Agent')
        time_diff = (datetime.now() - datetime.strptime(decrypted_data[3], "%Y-%m-%dT%H:%M:%SZ")).total_seconds()
        if time_diff > max_age_in_mins * 60:
            raise Exception("Session Expired")
        # Commented the existing code for a hotfix: session logout was happening because IP
        # validation fails in client networks. Fixed by VigneshRavishankar.
        # if decrypted_data[0] == user_id and \
        #         decrypted_data[1] == client_ip and \
        #         decrypted_data[2] == request.headers.get('User-Agent'):
        #     encrypted_str = decrypted_data
        #     return True, encrypted_str
        # print(f'Input User: {decrypted_data[0]} | Validate User: {user_id}')
        # print(f'Input IP: {decrypted_data[1]} | Validate IP: {client_ip}')
        # print(f'Input Agent: {decrypted_data[2]} | Validate Agent: {user_agent}')
        if decrypted_data[0] == user_id:
            user_check = True
        else:
            raise Exception('Invalid Cookie (User validation failed)')
        if app_configuration.ip_check:
            if decrypted_data[1] == client_ip:
                ip_check = True
            else:
                raise Exception('Invalid Cookie (IP validation failed)')
        if decrypted_data[2] == user_agent:
            agent_check = True
        else:
            raise Exception('Invalid Cookie (Agent validation failed)')
        if user_check and ip_check and agent_check:
            encrypted_str = decrypted_data
            return True, encrypted_str
        else:
            raise Exception("Invalid cookie")
    except Exception as e:
        print("Error : {}".format(str(e)))
        return status, encrypted_str
def apply_encryption(func):
    """
    This function is to be used as a decorator only.
    It works like a wrapper which decrypts the data before handing it to the function and
    encrypts the data before returning.
    :param func:
    :return: wrapper
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        if config.SERVICE_ENABLE_SECURITY is True:
            if 'session_id' not in request.cookies:
                return make_response(json.dumps({"status": "session_invalid", "message": "Invalid Session"}), 401)
            session_id = str(request.cookies['session_id'])[:16]
            data = request.data
            data = data.decode()
            status, encoded_str = validate_cookie(request.cookies['session_id'], request)
            if status:
                request.data = json.loads(AESCipher(session_id).decrypt(data))
                response = func(*args, **kwargs)
                data = AESCipher(session_id).encrypt(json.dumps(response))
                return data
            else:
                return make_response(json.dumps({"status": "session_invalid", "message": "Invalid Session"}), 401)
        else:
            # if "session_id" not in session:
            #     return make_response(json.dumps({"status": "session_invalid", "message": "Invalid Session"}), 401)
            request.data = json.loads(request.data.decode())
            if 'session_id' in request.cookies:
                status, encoded_str = validate_cookie(request.cookies['session_id'], request)
            else:
                return make_response(json.dumps({"status": "session_invalid", "message": "Invalid Session"}), 401)
            if status:
                response = func(*args, **kwargs)
                if str(request.url_rule).endswith('reset_password'):
                    return response
                else:
                    response = make_response(json.dumps(response), 200)
                    response.headers['X-Content-Type-Options'] = 'nosniff'
                    response.headers['X-Frame-Options'] = 'SAMEORIGIN'
                    response.headers['Cache-Control'] = 'no-store'
                    cookie_status, secure_cookie = create_cookie(request.data, request)
                    # response.set_cookie('session_id', request.cookies['session_id'],
                    #                     max_age=max_age_in_mins * 60, httponly=app_configuration.http_flag)
                    if cookie_status:
                        response.set_cookie('session_id', secure_cookie, httponly=app_configuration.http_flag)
                        return response
                    else:
                        raise Exception("Failed to set cookie")
            else:
                return make_response(json.dumps({"status": "session_invalid", "message": "Invalid Session"}), 401)

    return wrapper


def login_wrap(func):
    """
    This function is used as a decorator only for login services
    :param func:
    :return: wrapper
    """

    @wraps(func)
    def wrap_login(*args, **kwargs):
        if config.SERVICE_ENABLE_SECURITY is True:
            # session_id_1 = str(uuid.uuid4()).replace("-", "")
            # session["session_id"] = session_id_1
            session_id = str(session["session_id"])
            response = func(*args, **kwargs)
            resp = make_response(response, 200)
            resp.set_cookie("session_id", session_id, max_age=max_age_in_mins * 60)
            return resp
        else:
            # session_id_1 = str(uuid.uuid4()).replace("-", "")
            # session["session_id"] = session_id_1
            session_id = str(session["session_id"])
            request.data = json.loads(request.data.decode())
            response = func(*args, **kwargs)
            response = make_response(response, 200)
            response.set_cookie("session_id", session_id, max_age=max_age_in_mins * 60)
            return response

    return wrap_login


class CipherEncryption(object):
    def __init__(self, key):
        self.bs = AES.block_size
        self.key = hashlib.sha256(key.encode()).digest()

    def encrypt(self, raw):
        raw = self._pad(raw)
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return base64.b64encode(iv + cipher.encrypt(raw.encode()))

    def decrypt(self, enc):
        enc = base64.b64decode(enc)
        iv = enc[:AES.block_size]
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return self._unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8')

    def _pad(self, s):
        return s + (self.bs - len(s) % self.bs) * chr(self.bs - len(s) % self.bs)

    @staticmethod
    def _unpad(s):
        return s[:-ord(s[len(s) - 1:])]
from flask import request

from scripts.config import app_constants
from scripts.config.db_connection_obj import ConnectionObj
from scripts.utils.mongo_utility import MongoConnect

if ConnectionObj.mongo_connection_obj is None:
    ConnectionObj.mongo_connection_obj = MongoConnect()
my_col = app_constants.DBMapping.unique_id
metadata = app_constants.DBMapping.mongo_db_name


class GetNewId:
    @staticmethod
    def get_next_id(param1):
        my_doc = ConnectionObj.mongo_connection_obj.find_one(metadata, my_col, {"key": param1})
        if not my_doc:
            my_dict = {"key": param1, "id": "100"}
            ConnectionObj.mongo_connection_obj.database_insertion(metadata, my_col, my_dict)
            return my_dict['id']
        else:
            my_query = {"key": param1}
            my_doc = [ConnectionObj.mongo_connection_obj.find_one(metadata, my_col, my_query, {"_id": 0, "id": 1})]
            for each_document in my_doc:
                count_value = str(int(each_document['id']) + 1)
                new_values = {"$set": {"id": count_value}}
                ConnectionObj.mongo_connection_obj.update_one(metadata, my_col, my_query, new_values)
                return str(int(each_document['id']) + 1)

    @staticmethod
    def get_user_id(input_data=None):
        if "user_id" in request.cookies:
            user_id_from_cookies = request.cookies.get("user_id")
            # if enable_security:
            #     session_id = request.cookies.get("session_id")
            #     key = session_id[:16]
            #     user_id = AESCipher(key).decrypt(user_id_from_cookies)
            # else:
            user_id = user_id_from_cookies
            return {"status": "success", "user_id": user_id}
        else:
            if input_data and "user_id" in input_data and input_data["user_id"] != "":
                user_id = input_data["user_id"]
                return {"status": "success", "user_id": user_id}
            else:
                return {"status": "failed", "message": "user_id is missing from input"}
""" Mongo DB utility
All definitions related to mongo db is defined in this module
"""
import base64
import copy
import hashlib
import json
import os
import traceback
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
from operator import itemgetter
from uuid import UUID
# from Crypto import Random
# from Crypto.Cipher import AES
from Cryptodome import Random
from Cryptodome.Cipher import AES
from bson import ObjectId
from pymongo import MongoClient
from pymongo.errors import ConnectionFailure
from pymongo.errors import DuplicateKeyError as MongoDuplicateKeyError
from scripts.config import app_configuration as config
from scripts.config.app_constants import MongoEncryptionConstants
from scripts.config.db_connection_obj import ConnectionObj
from scripts.logging.logger import logger
from scripts.exceptions.exception_codes import MONGO001, MONGO002, MONGO003, MONGO004, MONGO005, MONGO006, MONGO007, \
MONGO009, MONGO010, MONGO011
from scripts.exceptions.module_exceptions import MongoException, MongoConnectionException, MongoQueryException, \
MongoRecordInsertionException, MongoFindException, MongoDeleteException, MongoUpdateException, \
MongoUnknownDatatypeException, MongoDistictQueryException, MongoObjectDeserializationException
exclude_encryption_datatypes = (datetime, UUID,)
try:
file_name = config.encryption_constants_file_path
if not os.path.exists(file_name):
encrypt_collection_dict = {}
else:
with open(file_name) as f:
mongo_encryption_constants_data = json.load(f)
if "encrypt_collection_dict" in mongo_encryption_constants_data:
encrypt_collection_dict = mongo_encryption_constants_data["encrypt_collection_dict"]
else:
encrypt_collection_dict = {}
except Exception as es:
encrypt_collection_dict = {}
logger.exception(" Unable to fetch mongo encryption constants:" + str(es))
class AESCipher(object):
    def __init__(self, key):
        # key = cipher_key
        self.bs = AES.block_size
        self.key = hashlib.sha256(key.encode()).digest()

    def encrypt(self, raw):
        raw = self._pad(raw)
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return base64.b64encode(iv + cipher.encrypt(raw.encode()))

    def decrypt(self, enc):
        enc = base64.b64decode(enc)
        iv = enc[:AES.block_size]
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return self._unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8')

    def _pad(self, s):
        return s + (self.bs - len(s) % self.bs) * chr(self.bs - len(s) % self.bs)

    @staticmethod
    def _unpad(s):
        return s[:-ord(s[len(s) - 1:])]
class MongoDataEncryption(object):
def __init__(self):
self.aes_cipher = AESCipher(key=MongoEncryptionConstants.cipher_key['k'])
# pass
def create_encrypted_string(self, payload):
return self.aes_cipher.encrypt(raw=json.dumps(payload))
def create_decrypted_string(self, payload):
result = json.loads(self.aes_cipher.decrypt(enc=payload))
return result
def encrypt_data(self, json_data, collection_name):
"""
Encrypt the data in mongo based on the collection and key to be encrypted.
:param json_data: The data to be encrypted
:param collection_name: The collection where the document is stored
:return: Encrypted document based on product defined configuration.
"""
# TODO: Automatically add an unsupported data type to the salt.
try:
if collection_name in encrypt_collection_dict.keys():
if type(json_data) is list:
encrypted_data = list()
for data in encrypted_data:
dict_data = self.encrypt_dict_data(doc=data, collection_name=collection_name)
encrypted_data.append(dict_data)
elif type(json_data) is dict:
encrypted_data = self.encrypt_dict_data(doc=json_data, collection_name=collection_name)
else:
raise MongoUnknownDatatypeException("Unsupported datatype '{}' is being inserted to mongodb.".
format(type(json_data)))
else:
logger.debug("Given data is not a part of the Mongo encryption setup. Skipping encryption")
if type(json_data) is dict:
encrypted_data = json_data
encrypted_data[MongoEncryptionConstants.product_encrypted] = False
else:
encrypted_data = json_data
return encrypted_data
except MongoException as e:
raise MongoException(str(e))
except Exception as e:
raise MongoException("Server faced a problem when encrypting the data --> {}".format(str(e)))
def encrypt_dict_data(self, doc, collection_name):
"""
This method crawls the document and encrypts the keys that are marked for encryption.
Skips encrypting the keys with values of the datatypes defines in the tuple 'exclude_encryption_datatypes'
Adds two new keys to the document 'product_encrypted' and 'encryption_salt'
Key product_encrypted - Is a boolean value which flags a document as encrypted by this utility.
Key encryption_salt - List of all the values that were excluded from encryption due to datatype constraints.
:param doc: The document considered for encryption
:param collection_name: The collection where the document resided.
This is needed for the utility to read the encryption configuration
:return: The input document with the relevant keys encrypted.
"""
try:
is_mlens_encrypted = False
encrypted_data = dict()
encrypted_data["encryption_salt"] = dict()
if '*' in encrypt_collection_dict[collection_name][MongoEncryptionConstants.key_encrypt_keys]:
# Forming encryption salt
for index, exclude_encryption_datatype in enumerate(exclude_encryption_datatypes):
if exclude_encryption_datatype not in [None, '']:
encrypted_data["encryption_salt"]["dt_{}".format(index)] = \
self.search_datatype(doc, exclude_encryption_datatype)
sorted_path = sorted(encrypted_data["encryption_salt"]["dt_{}".format(index)],
key=itemgetter('p'), reverse=True)
for path_index, _path in enumerate(sorted_path):
to_pop = self.remove_value_of_datatype_command(_path, "dict_data")
exec(to_pop)
for dt in encrypted_data["encryption_salt"]:
for path_index, _path in enumerate(encrypted_data["encryption_salt"][dt]):
encrypted_data["encryption_salt"][dt][path_index]['p'] = base64.b64encode(_path['p'].encode())
# Encrypting the data
for key in doc.keys():
if key not in \
encrypt_collection_dict[collection_name][MongoEncryptionConstants.key_exclude_encryption]:
encrypted_data[key] = {'d': self.create_encrypted_string(payload=self.convert(doc[key])),
't': base64.b64encode(type(doc[key]).__name__.encode())}
is_mlens_encrypted = True
else:
encrypted_data[key] = doc[key]
else:
for key in doc.keys():
if key in encrypt_collection_dict[collection_name][MongoEncryptionConstants.key_encrypt_keys]:
# Forming encryption salt
for index, exclude_encryption_datatype in enumerate(exclude_encryption_datatypes):
if exclude_encryption_datatype not in [None, '']:
temp_dict_data = dict()
temp_dict_data[key] = copy.deepcopy(doc[key])
encrypted_data["encryption_salt"]["dt_{}".format(index)] = \
self.search_datatype(temp_dict_data, exclude_encryption_datatype)
sorted_path = sorted(encrypted_data["encryption_salt"]["dt_{}".format(index)],
key=itemgetter('p'), reverse=True)
for path_index, _path in enumerate(sorted_path):
to_pop = self.remove_value_of_datatype_command(_path, "dict_data")
exec(to_pop)
for dt in encrypted_data["encryption_salt"]:
for path_index, _path in enumerate(encrypted_data["encryption_salt"][dt]):
encrypted_data["encryption_salt"][dt][path_index]['p'] = base64.b64encode(
_path['p'].encode())
# Encrypting the data
encrypted_data[key] = {'d': self.create_encrypted_string(payload=self.convert(doc[key])),
't': base64.b64encode(type(doc[key]).__name__.encode())}
is_mlens_encrypted = True
else:
encrypted_data[key] = doc[key]
encrypted_data[MongoEncryptionConstants.product_encrypted] = is_mlens_encrypted
if not encrypted_data[MongoEncryptionConstants.product_encrypted]:
del encrypted_data["encryption_salt"]
return encrypted_data
except MongoException as e:
raise MongoException(str(e))
except Exception as e:
raise MongoException("Server faced a problem when encrypting the data --> {}".format(str(e)))
def decrypt_data(self, dict_data, _collection_name):
"""
This method decrypts all the data that is encrypted.
Keys that were excluded during encryption and have been added to the encryption_salt
will be added back to their original positions.
:param dict_data: The document that needs to be decrypted
:param _collection_name: The collection to which the document belongs to
:return: The decrypted data with the original data types intact
"""
try:
if _collection_name in encrypt_collection_dict.keys():
decrypted_data = dict()
if '*' in encrypt_collection_dict[_collection_name][MongoEncryptionConstants.key_encrypt_keys]:
for key in dict_data.keys():
if key not in encrypt_collection_dict[_collection_name][
MongoEncryptionConstants.key_exclude_encryption] and \
not isinstance(dict_data[key], exclude_encryption_datatypes):
if type(dict_data[key]) is dict:
if 'd' in dict_data[key].keys() and 't' in dict_data[key].keys():
decrypted_data[key] = self.decrypt_convert_proper_data_type(
data=self.create_decrypted_string(payload=dict_data[key]['d']),
data_type=base64.b64decode(dict_data[key]['t'].decode()).decode()
)
else:
decrypted_data[key] = dict_data[key]
else:
decrypted_data[key] = dict_data[key]
else:
decrypted_data[key] = dict_data[key]
else:
for key in dict_data.keys():
if key in encrypt_collection_dict[_collection_name][
MongoEncryptionConstants.key_encrypt_keys] and \
not isinstance(dict_data[key], exclude_encryption_datatypes):
if type(dict_data[key]) is dict:
if 'd' in dict_data[key].keys() and 't' in dict_data[key].keys():
decrypted_data[key] = self.decrypt_convert_proper_data_type(
data=self.create_decrypted_string(payload=dict_data[key]['d']),
data_type=base64.b64decode(dict_data[key]['t'].decode()).decode()
)
else:
decrypted_data[key] = dict_data[key]
else:
decrypted_data[key] = dict_data[key]
else:
decrypted_data[key] = dict_data[key]
else:
decrypted_data = dict_data
if MongoEncryptionConstants.product_encrypted in dict_data and \
dict_data[MongoEncryptionConstants.product_encrypted]:
if "encryption_salt" in dict_data:
for dt in dict_data["encryption_salt"]:
for val_index, val in enumerate(dict_data["encryption_salt"][dt]):
dict_data["encryption_salt"][dt][val_index]['p'] = \
base64.b64decode(dict_data["encryption_salt"][dt][val_index]['p'].decode()).decode()
for dt in dict_data["encryption_salt"]:
for val_index, val in enumerate(sorted(dict_data["encryption_salt"][dt], key=itemgetter('p'))):
to_add = self.add_value_datatype_command(
add_value=dict_data["encryption_salt"][dt][val_index],
var_name="decrypted_data",
value="dict_data[\"encryption_salt\"][dt][val_index]['v']")
exec(to_add)
else:
raise MongoException("Encrypted data does not have encryption salt! Unable to decrypt the data!")
if MongoEncryptionConstants.product_encrypted in decrypted_data:
del decrypted_data[MongoEncryptionConstants.product_encrypted]
if "encryption_salt" in decrypted_data:
del decrypted_data["encryption_salt"]
return decrypted_data
except MongoException as e:
raise MongoException(str(e))
except Exception as e:
raise MongoException("Server faced a problem when decrypting the data: {}".format(str(e)))
def decrypt_keys(self, encrypted_doc, collection_name, key_based=False):
"""
This method loops through the document and decrypts all the keys.
:param encrypted_doc: The document that needs to be decrypted
:param collection_name: The collection to which the document belongs to.
:param key_based: If decryption should be done based on key or on all keys (*)
:return:
"""
try:
decrypted_data = dict()
if key_based:
condition_dict = encrypt_collection_dict[collection_name][MongoEncryptionConstants.key_encrypt_keys]
else:
condition_dict = encrypt_collection_dict[collection_name][
MongoEncryptionConstants.key_exclude_encryption]
for key in encrypted_doc.keys():
if key in condition_dict and not isinstance(encrypted_doc[key], exclude_encryption_datatypes):
if type(encrypted_doc[key]) is dict:
if 'd' in encrypted_doc[key].keys() and 't' in encrypted_doc[key].keys():
decrypted_data[key] = self.decrypt_convert_proper_data_type(
data=self.create_decrypted_string(payload=encrypted_doc[key]['d']),
data_type=base64.b64decode(encrypted_doc[key]['t'].decode()).decode()
)
else:
decrypted_data[key] = encrypted_doc[key]
else:
decrypted_data[key] = encrypted_doc[key]
else:
decrypted_data[key] = encrypted_doc[key]
return decrypted_data
except Exception as e:
raise MongoException("Server faced a problem when decrypting the keys: {}".format(str(e)))
@staticmethod
def decrypt_convert_proper_data_type(data, data_type):
"""
Convert the de-serialized JSON object to the original data-type
:param data: The de-serialized data
:param data_type: The original data type to which the de-serialized data should be converted to
:return: The de-serialized data with it's original data type.
"""
if data_type == "int":
return int(data)
elif data_type == "list":
return data
elif data_type == "dict":
return data
elif data_type == "bool":
# if data == 'true':
# return True
# elif data == 'false':
# return False
# else:
# raise MongoException("Received unknown bool value (only true/false accepted)")
return data
else:
return data.lstrip('"').rstrip('"')
def convert(self, data):
"""
Convert all byte-like objects into the proper data types.
This supports conversion of nested dict, list and tuples.
:param data:
:return:
"""
if isinstance(data, bytes):
return data.decode('ascii')
if isinstance(data, dict):
return dict(map(self.convert, data.items()))
if isinstance(data, tuple):
return map(self.convert, data)
if isinstance(data, list):
return list(map(self.convert, data))
return data
def search_datatype(self, _input, search_type, prev_datapoint_path=''):
"""
Search for an excluded data type in a nested dictionary or list and record it's path in the document.
This does not support the exclusion of data of types dict and list.
:param _input: The input data
:param search_type: The data type to be searched for to exclude.
:param prev_datapoint_path: The path of a value in a nested dict or nested list.
:return: List of dictionaries, with each dictionary containing the true value and it's path.
"""
try:
output = []
current_datapoint = _input
current_datapoint_path = prev_datapoint_path
if search_type is dict:
raise Exception("Searching for datatype dict is not supported!")
elif search_type is list:
raise Exception("Searching for datatype list is not supported!")
else:
if isinstance(current_datapoint, dict):
for dkey in current_datapoint:
temp_datapoint_path = current_datapoint_path
temp_datapoint_path += "dict-{}.".format(dkey)
for index in self.search_datatype(current_datapoint[dkey], search_type, temp_datapoint_path):
output.append(index)
elif isinstance(current_datapoint, list):
for index in range(0, len(current_datapoint)):
temp_datapoint_path = current_datapoint_path
temp_datapoint_path += "list-{}.".format(index)
for index_1 in self.search_datatype(current_datapoint[index], search_type, temp_datapoint_path):
output.append(index_1)
elif isinstance(current_datapoint, search_type):
output.append(dict(p=current_datapoint_path, v=current_datapoint))
output = filter(None, output)
return list(output)
except Exception as e:
raise Exception("Server faced a problem when searching for instances of datatype '{}' --> ".
format(search_type, str(e)))
@staticmethod
def remove_value_of_datatype_command(remove_value, var_name):
"""
This method produces the command for the value to be removed from a nested dict or list,
when given the path of that value in the source variable.
:param remove_value: The value (it's path) to be removed.
:param var_name: The variable on which the exec function should run on to remove the non-serializable value.
:return: The final command that will run in the exec function to remove the value from a nested dict or list.
"""
temp_path = ''
individual_path_list = remove_value["p"].split('.')
individual_path_list.remove('')
if individual_path_list[len(individual_path_list) - 1].split('-')[0] == "dict":
orig_path = 'del {var_name}{path}'
elif individual_path_list[len(individual_path_list) - 1].split('-')[0] == "list":
pop_index = ".pop({})".format(individual_path_list[len(individual_path_list) - 1].split('-')[1])
orig_path = '{var_name}{path}' + pop_index
individual_path_list.pop(len(individual_path_list) - 1)
else:
return
for path_index, path in enumerate(individual_path_list):
if path.split('-')[0] == "dict":
temp_path += "[\"{}\"]".format(path.split('-')[1])
elif path.split('-')[0] == "list":
temp_path += "[{}]".format(path.split('-')[1])
orig_path = orig_path.format(path=temp_path, var_name=var_name)
return orig_path
@staticmethod
def add_value_datatype_command(add_value, var_name, value):
"""
        This method produces the command for the value to be added back to a nested dict or list,
        when given the path of that value in the source variable.
        :param add_value: The value (its path) to be added.
        :param var_name: The source variable name on which the exec function should run.
        :param value: The original non-serialized value.
        :return: The command to be executed on the source variable.
"""
path_string = ''
temp_path_string = ''
individual_path_list = add_value["p"].split('.')
individual_path_list.remove('')
for path_index, path in enumerate(individual_path_list):
if path.split('-')[0] == "dict":
temp_path_string = "[\"{}\"]".format(path.split('-')[1])
elif path.split('-')[0] == "list":
temp_path_string = "[{}]".format(path.split('-')[1])
else:
raise Exception("Unsupported datatype given for add value")
path_string += temp_path_string
        if individual_path_list[-1].split('-')[0] == "dict":
            command = "{var_name}{path} = {value}".format(var_name=var_name, path=path_string, value=value)
        elif individual_path_list[-1].split('-')[0] == "list":
            # str.rstrip() strips a character set rather than a suffix, so slice
            # off the final path segment to make the append target the parent list
            command = "{var_name}{path}.append({value})".format(var_name=var_name,
                                                                path=path_string[:-len(temp_path_string)],
                                                                value=value)
        else:
            raise Exception("Unsupported datatype given for add value")
return command
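    # Illustrative only: for the path 'dict-a.dict-b.' this builds
    #   payload["a"]["b"] = <value>
    # and for 'dict-a.list-0.' it builds  payload["a"].append(<value>).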
class MongoConnect(MongoDataEncryption):
def __init__(self):
super().__init__()
try:
if config.MONGO_USERNAME and config.MONGO_PASSWORD:
if config.MONGO_AUTHSOURCE and config.MONGO_AUTHMECHANISM:
self.__mongo_OBJ__ = MongoClient(host=config.MONGO_HOST, port=config.MONGO_PORT,
username=config.MONGO_USERNAME, password=config.MONGO_PASSWORD,
authSource=config.MONGO_AUTHSOURCE,
authMechanism=config.MONGO_AUTHMECHANISM)
else:
self.__mongo_OBJ__ = MongoClient(host=config.MONGO_HOST, port=config.MONGO_PORT,
username=config.MONGO_USERNAME, password=config.MONGO_PASSWORD)
else:
self.__mongo_OBJ__ = MongoClient(host=config.MONGO_HOST, port=config.MONGO_PORT)
try:
self.__mongo_OBJ__.admin.command('ismaster')
except ConnectionFailure:
raise MongoConnectionException("iLens was unable to create a connection with the metastore")
logger.debug("Mongo connection established")
except Exception as e:
ConnectionObj.mongo_connection_obj = None
logger.error("Error in establishing connection: " + str(e))
raise MongoConnectionException(MongoEncryptionConstants.MONGO001)
def __del__(self):
"""
To close the mongo connection
:return:
"""
try:
if self.__mongo_OBJ__ is not None:
self.__mongo_OBJ__.close()
logger.trace("Mongo connection closed")
        except Exception as e:
            # exceptions raised from __del__ are swallowed by the interpreter, so log instead
            logger.error("Error during closing of connection: " + str(e))
def get_mongo_obj(self):
return self.__mongo_OBJ__
def list_database_names(self):
return self.__mongo_OBJ__.list_database_names()
def list_collection_names(self, db_name):
return self.__mongo_OBJ__[db_name].list_collection_names()
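    # A minimal usage sketch (illustrative only; the database name below is
    # hypothetical, not part of this module):
    #   connection = MongoConnect()
    #   print(connection.list_database_names())
    #   print(connection.list_collection_names("ilens_configuration"))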
def find_with_condition(self, json_data, database_name, collection_name):
try:
database_connection = self.__mongo_OBJ__[database_name]
mongo_response = database_connection[collection_name].find(json_data)
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def fetch_last_records_with_limit(self, json_data, database_name, collection_name, condition_json, limit=1):
"""
        Fetches the latest n records matching a condition.
        :param json_data: The query by which the records are fetched from MongoDB.
        :param database_name: The database from which the documents are fetched.
        :param collection_name: The collection from which the documents are fetched.
        :param condition_json: The sort specification applied before limiting.
        :param limit: The number of records to fetch.
        :return: The decrypted records.
"""
try:
db = self.__mongo_OBJ__[database_name]
mongo_response = list(db[collection_name].find(json_data).sort(condition_json).limit(limit))
logger.debug("Fetched results from mongo")
mongo_response = self.fetch_records_from_object(body=mongo_response,
_collection_name=collection_name)
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def insert_one(self, json_data, database_name, collection_name):
"""
        To insert a single document into a collection.
        :param json_data: The document data to be inserted.
        :param database_name: The database to which the collection/document belongs.
        :param collection_name: The collection to which the document belongs.
        :return: The ID of the inserted document.
"""
try:
json_data = self.encrypt_data(json_data=json_data, collection_name=collection_name)
mongo_response = self.__mongo_OBJ__[database_name][collection_name].insert_one(json_data)
logger.debug("Inserted document in mongo")
return mongo_response.inserted_id
except MongoException as e:
raise MongoException(e)
except MongoDuplicateKeyError:
raise MongoDuplicateKeyError("Found an existing record with the same ID in MongoDB")
except Exception as e:
logger.error(f"{MONGO002}: {str(e)}")
raise MongoRecordInsertionException(f"{MONGO002}: {str(e)}")
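    # Sketch, assuming a hypothetical database/collection pair; the document is
    # encrypted via encrypt_data() before it reaches Mongo:
    #   new_id = connection.insert_one({"category": "support"},
    #                                  "ilens_configuration", "categories")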
def distinct_query(self, database_name, collection_name, query_key, filter_json=None):
try:
database_connection = self.__mongo_OBJ__[database_name]
mongo_response = database_connection[collection_name].distinct(query_key, filter_json)
return mongo_response
except Exception as e:
logger.error(f"{MONGO009}: {str(e)}")
raise MongoDistictQueryException(f"{MONGO009}: {str(e)}")
def fetch_paginated_sorted_data(self, _database_name, collection_name, keyword_dict=None, skip=0,
sort_columns=None, find_condition=None, limit=10):
"""
        Fetch a sorted, paginated set of records along with the total record count.
        :param find_condition: The query to filter records.
        :param _database_name: The database to which the collection belongs.
        :param collection_name: The collection from which the records are fetched.
        :param keyword_dict: The fields to include in the response (currently unused).
        :param sort_columns: The sort specification for the records.
        :param skip: The number of records to skip.
        :param limit: The maximum number of records to fetch.
        :return: The decrypted records and the total record count.
"""
try:
# if keyword_dict is None:
# keyword_dict = dict()
if sort_columns is None:
sort_columns = dict()
if find_condition is None:
find_condition = dict()
db = self.__mongo_OBJ__[_database_name]
mongo_response = db[collection_name].find(find_condition).sort(sort_columns).skip(skip).limit(limit)
            # count_documents replaces the Cursor.count() API deprecated by pymongo
            total_records = db[collection_name].count_documents(find_condition)
logger.trace("Fetched results from mongo")
mongo_response = self.fetch_records_from_object(body=mongo_response, _collection_name=collection_name)
return mongo_response, total_records
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
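    # Sketch, assuming hypothetical names; sort_columns takes the pymongo
    # (key, direction) pair form:
    #   records, total = connection.fetch_paginated_sorted_data(
    #       "ilens_configuration", "categories",
    #       sort_columns=[("name", 1)], find_condition={"active": True},
    #       skip=0, limit=10)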
    def find_with_sort(self, db_name, collection_name, query_json, search_option=None, sort_json=None):
        """
        :param db_name: The database to which the collection belongs.
        :param collection_name: The collection on which the query runs.
        :param query_json: The query to filter records.
        :param search_option: The fields to include in the mongo response.
        :param sort_json: The sort specification, e.g. a list of (key, direction) pairs.
        :return: The decrypted records.
        """
mg_response = {}
try:
docid = self.__mongo_OBJ__[db_name][collection_name]
            if search_option:
                cursor = docid.find(query_json, search_option)
            else:
                cursor = docid.find(query_json)
            if sort_json:
                cursor = cursor.sort(sort_json)
            response = list(cursor)
mg_response = self.fetch_records_from_object(body=response,
_collection_name=collection_name)
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
return mg_response
def database_insertion(self, db_name, collection_name, query_json):
"""
        To insert a single document into a collection.
        :param query_json: The document data to be inserted.
        :param db_name: The database to which the collection/document belongs.
        :param collection_name: The collection to which the document belongs.
        :return: The ID of the inserted document.
"""
try:
json_data = self.encrypt_data(json_data=query_json, collection_name=collection_name)
mongo_response = self.__mongo_OBJ__[db_name][collection_name].insert_one(json_data)
logger.debug("Inserted document in mongo")
return mongo_response.inserted_id
except MongoException as e:
raise MongoException(e)
except MongoDuplicateKeyError:
raise MongoDuplicateKeyError("Found an existing record with the same ID in MongoDB")
except Exception as e:
logger.error(f"{MONGO002}: {str(e)}")
raise MongoRecordInsertionException(f"{MONGO002}: {str(e)}")
def insert_many_records(self, json_data, collection_name, database_name):
try:
json_data = self.encrypt_data(json_data=json_data, collection_name=collection_name)
mongo_response = self.__mongo_OBJ__[database_name][collection_name].insert_many(json_data)
json_mongo_response_object = mongo_response.inserted_ids
return json_mongo_response_object
except Exception as e:
logger.error(f"{MONGO002}: {str(e)}")
raise MongoRecordInsertionException(f"{MONGO002}: {str(e)}")
def record_bulk_remove(self, db_name, collection_name, query_json):
mg_response = {}
try:
docid = self.__mongo_OBJ__[db_name][collection_name]
            for key, value in query_json.items():
                # delete_many replaces the Collection.remove() call deprecated by
                # pymongo; raw_result keeps the JSON-serializable response shape
                mg_response = json.dumps(docid.delete_many({key: value}).raw_result)
except Exception as e:
logger.error(f"{MONGO005}: {str(e)}")
return mg_response
def delete_one_record(self, db_name, collection_name, query_json):
mg_response = {}
try:
docid = self.__mongo_OBJ__[db_name][collection_name]
mg_response = docid.delete_one(query_json)
except Exception as e:
logger.exception("Exception while deleting the single record:" + str(e))
return mg_response
def find_many(self, query_json, db_name, collection_name, search_option=None, limit=None):
"""
        :param query_json: The condition on which the documents are to be found.
        :param db_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :param search_option: The fields that need to be included in the mongo response.
        :param limit: Limits the number of records returned.
        :return: The decrypted records.
"""
try:
docid = self.__mongo_OBJ__[db_name][collection_name]
if search_option and limit:
response = list(docid.find(query_json, search_option).limit(limit))
elif search_option:
response = list(docid.find(query_json, search_option))
else:
response = list(docid.find(query_json))
result = self.fetch_records_from_object(body=response,
_collection_name=collection_name)
logger.debug("Fetched results from mongo")
# print("length of mongo response after encryption", len(result))
return result
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def find_one(self, db_name, collection_name, query, search_json=None):
"""
        To find a single document in a collection.
        :param query: The condition on which the document is to be found.
        :param db_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :param search_json: The fields that need to be included in the mongo response.
        :return: The decrypted document, or None when no match is found.
"""
try:
db = self.__mongo_OBJ__[db_name]
if search_json:
mongo_response = db[collection_name].find_one(query, search_json)
else:
mongo_response = db[collection_name].find_one(query)
if mongo_response:
mongo_response = [mongo_response]
# print("without decrypted mongo response", mongo_response)
mongo_response = self.fetch_records_from_object(body=mongo_response,
_collection_name=collection_name)
logger.debug("Fetched results from mongo")
# print("decrypted mongo response", mongo_response)
response = mongo_response[0]
return response
else:
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def find_one_and_replace(self, db_name, collection_name, query, existing_data):
mg_response = {}
try:
docid = self.__mongo_OBJ__[db_name][collection_name]
mg_response = docid.find_one_and_replace(query, existing_data)
except Exception as e:
logger.error(f"{MONGO010}: {str(e)}")
return mg_response
def find_with_sort_and_skip(self, db_name, collection_name, query_json, sort_json, skip, limit, search_option=None):
"""
        :param db_name: The database in which the collection exists.
        :param collection_name: The collection on which the query runs.
        :param query_json: The query to filter records.
        :param sort_json: The sort specification for the records.
        :param skip: The number of records to skip.
        :param limit: The maximum number of records to fetch.
        :param search_option: The fields to include in the returned records.
        :return: The list of matching records (not decrypted).
"""
mg_response = []
try:
docid = self.__mongo_OBJ__[db_name][collection_name]
if search_option:
response = docid.find(query_json, search_option).sort(sort_json).skip(skip).limit(limit)
else:
response = docid.find(query_json).sort(sort_json).skip(skip).limit(limit)
mg_response = list(response)
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
return mg_response
def search_record_by_query2(self, db_name, collection_name, query_json, search_option=None):
"""
        :param db_name: The database to which the collection belongs.
        :param collection_name: The collection on which the query runs.
        :param query_json: The query to filter records.
        :param search_option: The fields to include in the mongo response.
        :return: The decrypted records.
"""
result = {}
try:
docid = self.__mongo_OBJ__[db_name][collection_name]
if search_option:
response = list(docid.find(query_json, search_option))
else:
response = list(docid.find(query_json))
result = self.fetch_records_from_object(body=response,
_collection_name=collection_name)
logger.debug("Fetched results from mongo")
return result
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
return result
def find_one_with_condition(self, json_data, database_name, collection_name, search_json=None):
try:
db = self.__mongo_OBJ__[database_name]
if search_json:
mongo_response = db[collection_name].find_one(json_data, search_json)
else:
mongo_response = db[collection_name].find_one(json_data)
if mongo_response:
mongo_response = [mongo_response]
# print("without decrypted mongo response", mongo_response)
mongo_response = self.fetch_records_from_object(body=mongo_response,
_collection_name=collection_name)
logger.debug("Fetched results from mongo")
# print("decrypted mongo response", mongo_response)
response = mongo_response[0]
return response
else:
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def fetch_all(self, db_name, collection_name):
"""
        To find all the documents in a collection.
        :param db_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: The decrypted records.
"""
try:
db = self.__mongo_OBJ__[db_name]
mongo_response = list(db[collection_name].find())
# print(50 * "=")
# print(" length of mongo response without encryption", len(mongo_response))
# print("collection name", collection_name)
mongo_response = self.fetch_records_from_object(body=mongo_response,
_collection_name=collection_name)
# print(" length of mongo response after encryption", len(mongo_response))
logger.debug("Fetched results from mongo")
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def read(self, json_data, database_name, collection_name):
"""
        To find all the documents matching a query.
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :param json_data: The query to filter records.
        :return: The raw mongo cursor (no decryption is applied).
"""
try:
db = self.__mongo_OBJ__[database_name]
mongo_response = db[collection_name].find(json_data)
logger.debug("Fetched results from mongo")
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def find_record_count(self, db_name, collection_name, query_json):
"""
        To count the documents matching a query.
        :param db_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :param query_json: The query to filter records.
        :return: The number of matching documents.
"""
try:
db = self.__mongo_OBJ__[db_name]
            mongo_response = db[collection_name].count_documents(query_json)
logger.debug("Fetched results from mongo")
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def find_json_unencrypted(self, json_data, database_name, collection_name):
"""
        To find documents without running the decryption module on them.
        :param json_data: The query to filter records.
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: The raw mongo cursor.
"""
try:
db = self.__mongo_OBJ__[database_name]
mongo_response = db[collection_name].find(json_data)
logger.debug("Fetched results from mongo")
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def find_all_unencrypted(self, database_name, collection_name):
"""
        To find all the documents without running the decryption module on them.
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: The raw mongo cursor.
"""
try:
db = self.__mongo_OBJ__[database_name]
mongo_response = db[collection_name].find()
logger.debug("Fetched results from mongo")
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def find_json_with_keyword(self, keyword_dict, json_data, database_name, collection_name):
"""
        Find and return all documents matching a key/value pair, projected to the selected keywords.
        :param keyword_dict: The fields to include in the fetched documents.
        :param json_data: The key/value pair based on which the documents are fetched.
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: List of all the matching documents, decrypted.
"""
try:
database_connection = self.__mongo_OBJ__[database_name]
mongo_response = list(database_connection[collection_name].find(json_data, keyword_dict))
mongo_response = self.fetch_records_from_object(body=mongo_response,
_collection_name=collection_name)
logger.debug("Fetched results from mongo")
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def remove(self, json_data, database_name, collection_name):
"""
        To delete documents from a collection.
        :param json_data: The key/value pair based on which the documents are deleted.
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: Boolean value.
"""
try:
database_connection = self.__mongo_OBJ__[database_name]
            # delete_many replaces the Collection.remove() call deprecated by pymongo
            mongo_response = database_connection[collection_name].delete_many(json_data)
logger.debug("Deleted document from mongo. Message: {}".format(mongo_response))
return True
except Exception as e:
logger.error(f"{MONGO004}: {str(e)}")
raise MongoDeleteException(f"{MONGO004}: {str(e)}")
def delete_many(self, database_name, collection_name):
"""
        To delete all documents from a collection.
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: Boolean value.
        """
        try:
            database_connection = self.__mongo_OBJ__[database_name]
            # delete_many requires a filter document; an empty filter matches everything
            mongo_response = database_connection[collection_name].delete_many({})
logger.debug("Deleted document from mongo. Message: {}".format(mongo_response))
return True
except Exception as e:
logger.error(f"{MONGO004}: {str(e)}")
raise MongoDeleteException(f"{MONGO004}: {str(e)}")
def delete_many_with_filter(self, db_name, collection_name, filter_query):
mg_response = {}
try:
docid = self.__mongo_OBJ__[db_name][collection_name]
mg_response = docid.delete_many(filter_query)
except Exception as e:
logger.error(f"{MONGO004}: {str(e)}")
return mg_response
def update_one(self, db_name, collection_name, query, set_json, upsert=False):
"""
        To update a single document.
        :param query: The condition by which the document is selected for update.
        :param set_json: The JSON data that should replace the matched fields.
        :param db_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :param upsert: If True, insert a new record when none matches the query.
        :return: success
"""
try:
database_connection = self.__mongo_OBJ__[db_name]
if "$set" in set_json:
json_data = self.encrypt_data(json_data=set_json["$set"], collection_name=collection_name)
database_connection[collection_name].update_one(query, {"$set": json_data}, upsert=upsert)
else:
json_data = self.encrypt_data(json_data=set_json, collection_name=collection_name)
database_connection[collection_name].update_one(query, {"$set": json_data}, upsert=upsert)
logger.debug("Updated document from mongo")
return True
except Exception as e:
logger.error(f"{MONGO005}: {str(e)}")
raise MongoUpdateException(f"{MONGO005}: {str(e)}")
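    # Sketch, assuming hypothetical names; set_json may be passed with or
    # without the "$set" wrapper, both branches above normalise it:
    #   connection.update_one("ilens_configuration", "categories",
    #                         {"category_id": "cat_1"},
    #                         {"$set": {"name": "Support"}}, upsert=True)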
def update_many_values(self, db_name, collection_name, query, set_json, upsert=False):
"""
        Definition for updating multiple records in mongo according to the query
        :param upsert: If True, insert a new record when none matches the query.
        :param db_name: The database to which the collection belongs.
        :param collection_name: The collection to which the records belong.
        :param query: The condition by which records are selected.
        :param set_json: The fields to update.
:return:
"""
mg_response = {}
try:
database_connection = self.__mongo_OBJ__[db_name]
if "$set" in set_json:
json_data = self.encrypt_data(json_data=set_json["$set"], collection_name=collection_name)
database_connection[collection_name].update_many(query, {"$set": json_data}, upsert=upsert)
else:
json_data = self.encrypt_data(json_data=set_json, collection_name=collection_name)
database_connection[collection_name].update_many(query, {"$set": json_data}, upsert=upsert)
# mg_response = docid.update_many(query, {"$set": set_json}, upsert=upsert)
except Exception as es:
logger.exception(es)
return mg_response
def update_many(self, db_name, collection_name, query, set_json, upsert=False):
"""
        Definition for updating multiple records in mongo according to the query
        :param upsert: If True, insert a new record when none matches the query.
        :param db_name: The database to which the collection belongs.
        :param collection_name: The collection to which the records belong.
        :param query: The condition by which records are selected.
        :param set_json: The fields to update.
:return:
"""
mg_response = {}
try:
database_connection = self.__mongo_OBJ__[db_name]
if "$set" in set_json:
json_data = self.encrypt_data(json_data=set_json["$set"], collection_name=collection_name)
database_connection[collection_name].update_one(query, {"$set": json_data}, upsert=upsert)
else:
json_data = self.encrypt_data(json_data=set_json, collection_name=collection_name)
database_connection[collection_name].update_one(query, {"$set": json_data}, upsert=upsert)
logger.debug("Updated document from mongo")
return True
except Exception as e:
logger.error(f"{MONGO005}: {str(e)}")
return mg_response
def data_base_update_by_query(self, db_name, collection_name, query, set_json, upsert=False):
"""
Definition for updating one record in mongo according to the query
:param upsert:
:param db_name:
:param collection_name:
:param query:
:param set_json:
:return:
"""
mg_response = {}
try:
            # docid already points at the collection; indexing it again would
            # target a "collection.collection" sub-collection
            docid = self.__mongo_OBJ__[db_name][collection_name]
            if "$set" in set_json:
                json_data = self.encrypt_data(json_data=set_json["$set"], collection_name=collection_name)
                docid.update_one(query, {"$set": json_data})
            else:
                json_data = self.encrypt_data(json_data=set_json, collection_name=collection_name)
                docid.update_one(query, {"$set": json_data})
except Exception as e:
logger.error(f"{MONGO005}: {str(e)}")
return mg_response
def update(self, condition, json_data, database_name, collection_name):
"""
        To update a single document.
        :param condition: The condition by which the document is selected for update.
        :param json_data: The JSON data that should replace the matched fields.
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: success
"""
try:
database_connection = self.__mongo_OBJ__[database_name]
if "$set" in json_data:
json_data = self.encrypt_data(json_data=json_data["$set"], collection_name=collection_name)
database_connection[collection_name].update_one(condition, {"$set": json_data})
else:
json_data = self.encrypt_data(json_data=json_data, collection_name=collection_name)
database_connection[collection_name].update_one(condition, {"$set": json_data})
logger.debug("Updated document from mongo")
return True
except Exception as e:
logger.error(f"{MONGO005}: {str(e)}")
raise MongoUpdateException(f"{MONGO005}: {str(e)}")
def aggregate_query(self, json_data, database_name, collection_name):
"""
        To search using an aggregation pipeline.
        :param json_data: The aggregation pipeline stages.
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: response object
"""
try:
database_connection = self.__mongo_OBJ__[database_name]
mongo_response = database_connection[collection_name].aggregate(json_data)
logger.debug("Fetched results from mongo")
return mongo_response
except Exception as e:
logger.error(f"{MONGO006}: {str(e)}")
raise MongoQueryException(f"{MONGO006}: {str(e)}")
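    # Sketch with a hypothetical pipeline counting documents per category:
    #   pipeline = [{"$match": {"active": True}},
    #               {"$group": {"_id": "$category", "count": {"$sum": 1}}}]
    #   cursor = connection.aggregate_query(pipeline, "ilens_configuration", "categories")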
def aggregate(self, db_name, collection_name, list_for_aggregation):
"""
:param db_name:
:param collection_name:
:param list_for_aggregation:
:return:
"""
mg_response = dict()
try:
docid = self.__mongo_OBJ__[db_name][collection_name]
mg_response = docid.aggregate(list_for_aggregation)
except Exception as e:
logger.error(f"{MONGO006}: {str(e)}")
return mg_response
def close_connection(self):
"""
To close the mongo connection
:return:
"""
try:
if self.__mongo_OBJ__ is not None:
self.__mongo_OBJ__.close()
logger.debug("Mongo connection closed")
except Exception as e:
logger.error(f"{MONGO007}: {str(e)}")
raise MongoConnectionException(f"{MONGO007}: {str(e)}")
def find_item_containing_key_in_sub_json_object(self, condition_array, database_name, collection_name):
"""
        Return the first record whose sub-JSON contains one of the provided keys.
        :param condition_array: The list of conditions combined with "$or".
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: The first matching record.
"""
try:
database_connection = self.__mongo_OBJ__[database_name]
mongodb_response = database_connection[collection_name].find({"$or": condition_array})
mongodb_response = list(mongodb_response)[0]
# mongo_response = self.fetch_records_from_object(body=mongodb_response, collection_name=collection_name)
return mongodb_response
except Exception as e:
traceback.print_exc()
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def find_item_containing_key_in_sub_json_object_list(self, condition_array, database_name, collection_name):
"""
        Return all records whose sub-JSON contains one of the provided keys.
        :param condition_array: The list of conditions combined with "$or".
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: The list of matching records.
"""
try:
database_connection = self.__mongo_OBJ__[database_name]
mongodb_response = database_connection[collection_name].find({"$or": condition_array})
mongodb_response = list(mongodb_response)
# mongo_response = self.fetch_records_from_object(body=mongodb_response, collection_name=collection_name)
return mongodb_response
except Exception as e:
traceback.print_exc()
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def search_record_by_query(self, db_name, collection_name, query_json, search_option=None):
"""
        Definition for searching records by a query json
        :param db_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :param query_json: The query to filter records.
        :param search_option: The fields to include in the mongo response.
        :return: The decrypted records.
"""
mg_response = {}
try:
docid = self.__mongo_OBJ__[db_name][collection_name]
if search_option:
response = list(docid.find(query_json, search_option))
else:
response = list(docid.find(query_json))
mg_response = self.fetch_records_from_object(response, _collection_name=collection_name)
except Exception as e:
logger.error(f"{MONGO005}: {str(e)}")
return mg_response
def fetch_records_from_object(self, body, _collection_name):
"""
Definition for fetching the record from object
:param body:
:param _collection_name:
:return:
"""
final_list = []
try:
# for doc in body:
# print(" doc",doc)
# final_json = doc
# final_json = self.decrypt_data(dict_data=final_json, _collection_name=_collection_name)
# print(" finaj json:",final_json)
# final_list.append(final_json)
collection_name = [_collection_name] * len(body)
# print(" body",body)
# print(" collection name",collection_name)
with ThreadPoolExecutor(max_workers=MongoEncryptionConstants.max_docs_per_batch) as executor:
final_list = executor.map(self.decrypt_data, body, collection_name)
final_list = list(final_list)
except Exception as e:
logger.error(f"{MONGO005}: {str(e)}")
return list(final_list)
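    # Note: decryption of a fetched batch fans out over a thread pool, one
    # document per task; MongoEncryptionConstants.max_docs_per_batch caps the
    # worker count.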
@staticmethod
def object_id_deserializer(result_dict: dict):
"""
        Definition for de-serializing values of type ObjectId found in results retrieved from Mongo DB
        :param result_dict: The result document whose ObjectId values are converted to strings.
        :return: The document with ObjectId values stringified.
"""
try:
for key, value in result_dict.items():
if isinstance(value, ObjectId):
result_dict[key] = str(value)
return result_dict
except Exception as e:
logger.error(f"{MONGO011}: {str(e)}")
raise MongoObjectDeserializationException(f"{MONGO011}: {str(e)}")
def find_count(self, json_data, database_name, collection_name):
"""
        :param json_data: The query to filter records.
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :return: The number of matching documents.
"""
try:
db = self.__mongo_OBJ__[database_name]
            # count_documents replaces the Cursor.count() API deprecated by pymongo
            mongo_response = db[collection_name].count_documents(json_data)
logger.debug("fetched result count from mongo")
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO005}: {str(e)}")
def skip_docs(self, json_data, database_name, collection_name, condition_json, skip):
"""
        :param json_data: The query to filter records.
        :param database_name: The database to which the collection/documents belong.
        :param collection_name: The collection to which the documents belong.
        :param condition_json: The sort specification for the records.
        :param skip: The number of records to skip.
        :return: The raw mongo cursor.
"""
try:
db = self.__mongo_OBJ__[database_name]
mongo_response = db[collection_name].find(json_data).sort(condition_json).skip(skip)
logger.debug("Fetched results from mongo")
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO003}: {str(e)}")
def find_util(self, **kwargs):
try:
database_name = kwargs.get('database_name', None)
collection_name = kwargs.get('collection_name', None)
find_condition = kwargs.get('find_condition', dict())
select_condition = kwargs.get('select_condition', None)
sort_condition = kwargs.get('sort_condition', None)
skip = kwargs.get('skip', 0)
limit = kwargs.get('limit', None)
db = self.__mongo_OBJ__[database_name]
if select_condition:
mongo_response = db[collection_name].find(find_condition, select_condition)
else:
mongo_response = db[collection_name].find(find_condition)
if sort_condition is not None:
mongo_response = mongo_response.sort(sort_condition)
if skip:
mongo_response = mongo_response.skip(skip=skip)
if limit is not None:
mongo_response = mongo_response.limit(limit=limit)
# total_records = db[collection_name].find(find_condition).count()
logger.trace("Fetched results from mongo")
mongo_response = self.fetch_records_from_object(body=mongo_response, _collection_name=collection_name)
return mongo_response
except Exception as e:
logger.error(f"{MONGO003}: {str(e)}")
raise MongoFindException(f"{MONGO005}: {str(e)}")
def encrypt_unencrypted_docs(self, _database_name):
"""
        This method encrypts all the documents already present in MongoDB for the configured collections.
        :param _database_name: The database to which the collections/documents belong.
        :return: True/False based on the outcome of the process.
"""
try:
for collection in encrypt_collection_dict.keys():
all_docs = self.find_all_unencrypted(database_name=_database_name,
collection_name=collection)
for doc in all_docs:
if MongoEncryptionConstants.product_encrypted in doc and \
doc[MongoEncryptionConstants.product_encrypted]:
logger.debug("Document already encrypted by Product. Skipping...")
continue
if "_id" in doc:
_id = doc["_id"]
del doc["_id"]
else:
logger.warning("'_id' not found in document. Skipping...")
continue
logger.debug("Updating document with Product encryption")
try:
self.update_one(query={"_id": _id},
set_json=doc,
db_name=_database_name,
collection_name=collection)
except Exception as e:
logger.error("Server faced problem when encrypting document with ID {} in collection {} -> {}".
format(_id, collection, str(e)))
continue
logger.debug("Document updated with iLens encryption")
return True
except Exception as e:
logger.error("Server faced a problem when encrypting existing metadata to iLens encryption formats --> {}".
format(str(e)))
return False
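# A minimal end-to-end sketch, assuming the [mongo_db] host/port in conf are
# reachable; the database and collection names below are hypothetical, not
# part of this module.
if __name__ == "__main__":
    mongo_connection = MongoConnect()
    inserted_id = mongo_connection.insert_one({"category": "support"},
                                              "ilens_configuration", "categories")
    print(mongo_connection.find_one("ilens_configuration", "categories",
                                    {"_id": inserted_id}))
    mongo_connection.close_connection()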