Commit 6d69a778 authored by harshavardhan.c's avatar harshavardhan.c

Completed one version of the services for category configuration for Support Lens

parents
[service]
name = ilens
host= 0.0.0.0
port=8585
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
apply_processor_count = False
workers = 3
threads = 6
ip_check = False
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= 192.168.0.220
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[kairos_db]
url= http://ilens_kairos:8080
[system_login]
domain_list= ["knowledgelens.com"]
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[schedule_rule_engine]
url= http://localhost:9997/create_job
[profile_pic_path]
base_path= templates/profile_pic/
[email_default_baseurl]
base_url= https://app.ilens.io/cloud/send_mail
[ai_rules]
enabled= true
docker_endpoint= tcp://localhost:4243
[flow_model]
container_url= http://localhost:8180
[IMAGE]
path= images/
[LICENSE_PATH]
path= license/
[pipeline_internal]
mqtt_broker_host= 192.168.0.220
mqtt_broker_port= 1883
mqtt_broker_ssl= false
mqtt_broker_conn_type = tcp
mqtt_broker_ws_port= 8083
node_intermediate = mqtt
queue_host= 192.168.0.220
queue_port= 9092
[channel_pp_debug_node]
host= 192.168.0.220
port= 1883
topic = ilens/pipeline/debug
user_name =
password =
[AGENT]
manager_url = http://192.168.0.220/ilens_api
[REDIS]
host= 192.168.0.220
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[ILENS_VERSION]
version = v5.1
[DATA_PROCESSOR]
host = localhost
connection = tcp
port = 1883
topic = ilens/monitor/live/device_monitor_alarms
[LICENSE_SERVER]
host = 192.168.0.220
port = 9816
[service]
name = ilens
host= 0.0.0.0
port=8585
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
workers = 1
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= #MONGO_HOST#
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[kairos_db]
url= http://#KAIROS_CASSANDRA_HOST#:8080
[system_login]
domain_list= ["knowledgelens.com"]
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[schedule_rule_engine]
url= http://localhost:9997/create_job
[profile_pic_path]
base_path= templates/profile_pic/
[email_default_baseurl]
base_url= https://app.ilens.io/cloud/send_mail
[ai_rules]
enabled= true
docker_endpoint= tcp://localhost:4243
[flow_model]
container_url= http://localhost:8180
[IMAGE]
path= /images/metaservice/
[LICENSE_PATH]
path= license/
[pipeline_internal]
mqtt_broker_host= #MQTT_HOST#
mqtt_broker_port= 1883
mqtt_broker_ssl= false
mqtt_broker_conn_type = tcp
mqtt_broker_ws_port= 8083
node_intermediate= queue
queue_host= #KAFKA_HOST#
queue_port= 9092
[channel_pp_debug_node]
host= #CHANNEL_PP#
port= 1883
topic = ilens/pipeline/debug
user_name =
password =
[AGENT]
manager_url = http://#AGENT_URL#/ilens_api
[REDIS]
host= #REDIS_HOST#
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[ILENS_VERSION]
version = v5.1
[DATA_PROCESSOR]
host = #DATA_PROCESSOR_HOST#
connection = tcp
port = 1883
topic = ilens/monitor/live/device_monitor_alarms
\ No newline at end of file
[service]
name = ilens
host= 0.0.0.0
port=8585
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
apply_processor_count = False
workers = 3
threads = 6
ip_check = False
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= 192.168.0.220
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[kairos_db]
url= http://ilens_kairos:8080
[system_login]
domain_list= ["knowledgelens.com"]
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[schedule_rule_engine]
url= http://localhost:9997/create_job
[profile_pic_path]
base_path= templates/profile_pic/
[email_default_baseurl]
base_url= https://app.ilens.io/cloud/send_mail
[ai_rules]
enabled= true
docker_endpoint= tcp://localhost:4243
[flow_model]
container_url= http://localhost:8180
[IMAGE]
path= images/
[LICENSE_PATH]
path= license/
[pipeline_internal]
mqtt_broker_host= beta.ilens.io
mqtt_broker_port= 1883
mqtt_broker_ssl= false
mqtt_broker_conn_type = tcp
mqtt_broker_ws_port= 8083
node_intermediate= mqtt
queue_host= 192.168.0.220
queue_port= 9092
[channel_pp_debug_node]
host= 192.168.0.220
port= 1883
topic = ilens/pipeline/debug
user_name =
password =
[AGENT]
manager_url = http://192.168.0.220/ilens_api
[REDIS]
host= localhost
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[ILENS_VERSION]
version = v5.1
[DATA_PROCESSOR]
host = localhost
connection = tcp
port = 1883
topic = ilens/monitor/live/device_monitor_alarms
[LICENSE_SERVER]
host = 192.168.0.220
port = 9816
{
"encrypt_collection_dict" : {
"user": {
"encrypt_keys": ["phonenumber"],
"exclude_encryption": ["_id", "user_id", "client_id", "deleted_by","created_by", "username", "isdeleted", "email"]}
}
}
[service]
name = ilens
host= 0.0.0.0
port=8585
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
apply_processor_count = False
workers = 3
threads = 2
ip_check = False
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= 192.168.0.220
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[kairos_db]
url= http://ilens_kairos:8080
[system_login]
domain_list= ["knowledgelens.com"]
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[schedule_rule_engine]
url= http://localhost:9997/create_job
[profile_pic_path]
base_path= templates/profile_pic/
[email_default_baseurl]
base_url= https://app.ilens.io/cloud/send_mail
[ai_rules]
enabled= true
docker_endpoint= tcp://localhost:4243
[flow_model]
container_url= http://localhost:8180
[IMAGE]
path= images/
[LICENSE_PATH]
path= license/
[pipeline_internal]
mqtt_broker_host= beta.ilens.io
mqtt_broker_port= 1883
mqtt_broker_ssl= false
mqtt_broker_conn_type = tcp
mqtt_broker_ws_port= 8083
node_intermediate= queue
queue_host= 192.168.0.210
queue_port= 9092
[channel_pp_debug_node]
host= 192.168.0.220
port= 1883
topic = ilens/pipeline/debug
user_name =
password =
[AGENT]
manager_url = http://192.168.0.220/ilens_api
[REDIS]
host= localhost
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[ILENS_VERSION]
version = v5.1
[DATA_PROCESSOR]
host = localhost
connection = tcp
port = 1883
topic = ilens/monitor/live/device_monitor_alarms
[service]
name = ilens
host= 0.0.0.0
port=9090
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
apply_processor_count = False
workers = 5
threads = 6
ip_check = False
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= 192.168.0.207
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[REDIS]
host= 192.168.0.207
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[service]
name = ilens
host= 0.0.0.0
port=8585
interface=eth0
SECRET_KEY= KLKey
enable_security= False
cookie_max_age_in_mins= 30
max_attempts = 5
lockout_time_interval = 60
httpflag = True
secureflag = False
reset_time_interval = 0.5
apply_processor_count = False
workers = 5
threads = 6
ip_check = False
[log]
file_name= ilens
path= logs/
level=DEBUG
handler= rotating_file_handler
max_bytes= 10000000
back_up_count= 10
[mongo_db]
host= 10.0.0.5
port= 2717
username=
password=
authSource=
authMechanism=
mongo_constants_file_path= conf/mongo_encryption_constants.json
[kairos_db]
url= http://10.0.0.5:8080
[system_login]
domain_list= ["knowledgelens.com"]
[csv_conf]
csv_path= Log/csv/
upload_path= Log/UPLOAD
[schedule_rule_engine]
url= http://localhost:9997/create_job
[profile_pic_path]
base_path= templates/profile_pic/
[email_default_baseurl]
base_url= https://app.ilens.io/cloud/send_mail
[ai_rules]
enabled= true
docker_endpoint= tcp://localhost:4243
[flow_model]
container_url= http://localhost:8180
[IMAGE]
path= /images/metaservices/
[LICENSE_PATH]
path= license/
[pipeline_internal]
mqtt_broker_host= 10.0.0.5
mqtt_broker_port= 1883
mqtt_broker_ssl= false
mqtt_broker_conn_type = tcp
mqtt_broker_ws_port= 8083
node_intermediate= queue
queue_host= 10.0.0.5
queue_port= 9092
[channel_pp_debug_node]
host= 10.0.0.5
port= 1883
topic = ilens/pipeline/debug
user_name =
password =
[AGENT]
manager_url = http://10.0.0.5/ilens_api
[REDIS]
host= 172.31.47.241
port= 6379
key_expiry= 100
rules_db=0
alarms_db=1
live_tags_db=4
audit_db = 6
audit_queue= audit
[ILENS_VERSION]
version = v5.1
[DATA_PROCESSOR]
host = 172.31.47.241
connection = tcp
port = 1883
topic = ilens/monitor/live/device_monitor_alarms
[LICENSE_SERVER]
host = 10.0.0.5
port = 9816
# This is a sample Python script.
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
def print_hi(name):
    """Print a greeting for ``name`` to stdout (PyCharm starter sample)."""
    greeting = f'Hi, {name}'  # Press Ctrl+F8 to toggle the breakpoint.
    print(greeting)

# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    print_hi('PyCharm')
# See PyCharm help at https://www.jetbrains.com/help/pycharm/
import configparser
import json
import os
# read config file based on already set environment variable : APP_ENV
# If APP_ENV is null default configuration file is used
# New application config file should be in folder with same name as environment variable
# Resolve the environment-specific configuration directory; default to 'app'.
APP_ENV = os.environ.get('APP_ENV')
if not APP_ENV:
    APP_ENV = 'app'
config = configparser.ConfigParser()
CONFIGURATION_FILE = f"conf/{APP_ENV}/application.conf"
# configparser.read() returns the list of files successfully parsed; an empty
# list means the environment-specific file is missing, so fall back to "dev".
# BUG FIX: the original called config.read(CONFIGURATION_FILE) twice in a row
# (once unconditionally, once inside the `if`); a single read is sufficient.
if not config.read(CONFIGURATION_FILE):
    APP_ENV = "dev"
    CONFIGURATION_FILE = f"conf/{APP_ENV}/application.conf"
    config.read(CONFIGURATION_FILE)
"""
Configuration module
"""
"""
Service Info
"""
SERVICE_NAME = config["service"]["name"]
SERVICE_HOST = config["service"]["host"]
SERVICE_PORT = config.getint("service", "port")
SERVICE_SECRET_KEY = config["service"]["SECRET_KEY"]
SERVICE_ENABLE_SECURITY = config.getboolean("service", "enable_security", fallback=False)
SERVICE_INTERFACE = str(config["service"]["interface"])
DEVICE_STATUS_TIME = config.getint("service", "device_status_time_minutes", fallback=1)
APPLY_PROCESSOR_COUNT = config.getboolean("service", "apply_processor_count", fallback=False)
workers = config.getint("service", "workers", fallback=1)
threads = config.getint("service", "threads", fallback=1)
ip_check = config.getboolean("service", "ip_check", fallback=False)
cookie_max_age = config.getint("service", "cookie_max_age_in_mins", fallback=60)
max_attempts = config.getint("service", "max_attempts")
lockout_time_interval = config.getint("service", "lockout_time_interval")
reset_time_interval = config.getfloat("service", "reset_time_interval")
http_flag = config.getboolean('service', 'httpflag')
secure_flag = config.getboolean('service', 'secureflag')
"""
Log Info
"""
LOG_FILE_NAME = config["log"]["file_name"]
LOG_PATH = config["log"]["path"]
LOG_LEVEL = config["log"]["level"]
LOG_HANDLER = config["log"]["handler"]
LOG_MAX_BYTES = config.getint("log", "max_bytes")
LOG_BACKUP_COUNT = config.getint("log", "back_up_count")
"""
Mongo Info
"""
MONGO_HOST = config["mongo_db"]["host"]
MONGO_PORT = config.getint("mongo_db", "port", fallback=2717)
MONGO_USERNAME = config["mongo_db"]["username"]
MONGO_PASSWORD = config["mongo_db"]["password"]
MONGO_AUTHSOURCE = config["mongo_db"]["authSource"]
MONGO_AUTHMECHANISM = config["mongo_db"]["authMechanism"]
encryption_constants_file_path = config["mongo_db"]["mongo_constants_file_path"]
"""
Kairos Info
"""
KAIROS_DB_URL = config["kairos_db"]["url"]
"""
Additional info
"""
SYSTEM_LOGIN_DOMAIN_DOMAIN_LIST = json.loads(config["system_login"]["domain_list"])
CSV_PATH = config["csv_conf"]["csv_path"]
UPLOAD_CSV_PATH = config["csv_conf"]["upload_path"]
PROFILE_PIC_PATH = config["profile_pic_path"]["base_path"]
"""
Manual Entry conf
"""
SCHEDULER_BASE_URL = config["schedule_rule_engine"]["url"]
"""
Upload Parser
"""
# upload_parser = config["upload_parser"]
"""
Default email sender
"""
default_email_url = config["email_default_baseurl"]["base_url"]
"""
AI Rule : Auto Container Deployment Service
"""
AI_RULE_STATUS = config["ai_rules"]["enabled"]
if AI_RULE_STATUS.lower() == "true":
AI_RULE_DOCKER_ENDPOINT = config["ai_rules"]["docker_endpoint"]
else:
AI_RULE_DOCKER_ENDPOINT = "localhost"
CONTAINER_URL = config["flow_model"]["container_url"]
IMAGE_PATH = config.get('IMAGE', 'path', fallback="images/")
LICENSE_PATH = config.get('LICENSE_PATH', 'path', fallback="license/")
PIPELINE_INTERNAL_SECTION = 'pipeline_internal'
# Broker settings for internal pipeline traffic. Values stay as strings,
# mirroring what configparser returns.
PIPELINE_INTERNAL_CONFIGURATION = {
    "mqtt_broker_host": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_host', fallback='localhost'),
    "mqtt_broker_port": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_port', fallback='1883'),
    "mqtt_broker_ssl": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_ssl', fallback='false'),
    "mqtt_broker_conn_type": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_conn_type', fallback='tcp'),
    "mqtt_broker_ws_port": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_ws_port', fallback='8083'),
    "kafka_broker": config.get(PIPELINE_INTERNAL_SECTION, 'queue_host', fallback='localhost'),
    # BUG FIX: the fallback was 'localhost' (copy-paste from queue_host);
    # a port is expected here — default to the standard Kafka port.
    "kafka_port": config.get(PIPELINE_INTERNAL_SECTION, 'queue_port', fallback='9092'),
    "broker_ssl_path": config.get(PIPELINE_INTERNAL_SECTION, 'broker_ssl_path', fallback='/opt/ssl'),
    # conf value 'queue' selects kafka; anything else falls back to mqtt
    "node_intermediate": 'kafka' if config.get(PIPELINE_INTERNAL_SECTION, 'node_intermediate',
                                               fallback='kafka') == 'queue' else "mqtt"
}
CHANNEL_PIPELINE_CONFIG = config['channel_pp_debug_node']
# Convenience aliases for the individual MQTT settings
MQTT_BROKER_HOST = config['pipeline_internal']["mqtt_broker_host"]
MQTT_BROKER_PORT = config['pipeline_internal']["mqtt_broker_port"]
MQTT_BROKER_SSL = config['pipeline_internal']["mqtt_broker_ssl"]
MQTT_BROKER_CONN_TYPE = config['pipeline_internal']["mqtt_broker_conn_type"]
MQTT_BROKER_WS_PORT = config['pipeline_internal']["mqtt_broker_ws_port"]
BROKER_SSL_PATH = PIPELINE_INTERNAL_CONFIGURATION.get("broker_ssl_path", '/opt/ssl')
# Agent / device-manager endpoints
MANAGER_URL = config["AGENT"]["manager_url"]
DEVICE_MODE = config.get('AGENT', 'device_mode', fallback='agent')
SSL_CERT_PATH = config.get('AGENT', 'ssl_base_path', fallback='/opt/ssl')
TIME_SYNC_URL = config.get('AGENT', 'time_sync_url', fallback=None)
TIME_SYNC_URL_CERT_PATH = config.get('AGENT', 'cert_path', fallback=False)
# Redis Details
redis_host = str(config["REDIS"]["host"])
redis_port = int(config["REDIS"]["port"])
key_expiry_in_secs = int(config["REDIS"]["key_expiry"])
# Numbered Redis databases used per feature
rules_redis_db = int(config["REDIS"]["rules_db"])
alarms_redis_db = int(config["REDIS"]["alarms_db"])
live_tags_db = int(config["REDIS"]["live_tags_db"])
audit_db = int(config["REDIS"]["audit_db"])
audit_queue = str(config["REDIS"]["audit_queue"])
# iLens version
version = str(config["ILENS_VERSION"]["version"])
# Data Processor configs
data_processor_host = config.get('DATA_PROCESSOR', 'host', fallback=None)
data_processor_conn_type = config.get('DATA_PROCESSOR', 'connection', fallback='tcp')
# BUG FIX: bool(config.get(...)) evaluated *string* truthiness, so a conf
# value of "false" still yielded True. getboolean() parses
# true/false/yes/no/on/off/1/0 case-insensitively.
data_processor_ssl = config.getboolean('DATA_PROCESSOR', 'ssl_enabled', fallback=False)
data_processor_port = int(config.get('DATA_PROCESSOR', 'port', fallback=1883))
data_processor_topic = str(config.get('DATA_PROCESSOR', 'topic', fallback=None))
# License server endpoint. getint keeps LICENSE_PORT an int whether it comes
# from the conf file or the fallback (config.get returned a str when set).
LICENSE_SERVER = config.get('LICENSE_SERVER', 'host', fallback="localhost")
LICENSE_PORT = config.getint('LICENSE_SERVER', 'port', fallback=9816)
class Endpoints:
    """URL paths served by the support-lens blueprint."""
    base_url = "/support_lens"
    # Category configuration endpoints
    category_list = f"{base_url}/category/list"
    category_save = f"{base_url}/category/save"
    category_edit = f"{base_url}/category/edit"
    category_fetch = f"{base_url}/category/fetch"
class DBMapping:
    """Mongo database and collection name constants for support lens."""
    # DBConstants
    support_lens_configuration = "supportlens_configuration"
    # CollectionConstants
    category_configuration = "category_configuration"
    subcategory_configuration = "subcategory_configuration"
class StatusMessages:
    """Response status values and user-facing failure messages."""
    SUCCESS = "success"
    FAILED = "failed"
    # Default failure messages, one per operation
    CATEGORY_LIST = "Failed to fetch category list"
    CATEGORY_DATA = "Failed to save category data"
    CATEGORY_DELETE = "Failed to delete category data"
    CATEGORY_FETCH = "Failed to fetch category data"
class StaticJsons:
    """Static UI payloads returned to the front end."""
    # Table header definition for the category listing screen.
    CATEGORY_HEADERCONTENT = [
        {"label": label, "key": key}
        for label, key in (
            ("Category Name", "category_name"),
            ("Description", "description"),
            ("Sub Categories", "sub_categories"),
        )
    ]
class _ConnectionHolder:
    """Process-wide holder for the lazily created Mongo connection."""

    def __init__(self):
        # Populated on first use (see SupportLensHandler.__init__).
        self.mongo_connection_obj = None


# Shared singleton; imported elsewhere as ``ConnectionObj``. (The original
# shadowed the class name with the instance, so the class itself was never
# reachable from outside.)
ConnectionObj = _ConnectionHolder()
import time
from copy import deepcopy
from scripts.config.app_constants import StatusMessages, StaticJsons, DBMapping
from scripts.config.db_connection_obj import ConnectionObj
from scripts.logging.logger import logger
from scripts.utils.get_new_id import GetNewId
from scripts.utils.mongo_utility import MongoConnect
class SupportLensHandler:
    """CRUD handler for support-lens category/sub-category configuration.

    Persists documents in MongoDB using the shared connection held by
    ``ConnectionObj`` (created lazily on first use) and the database and
    collection names declared in ``DBMapping``.
    """

    def __init__(self):
        try:
            logger.debug("Inside the Support lens Handler module")
            self.new_id = GetNewId()
            # Reuse the process-wide Mongo connection, creating it lazily.
            self.mongo_obj = ConnectionObj.mongo_connection_obj
            if not ConnectionObj.mongo_connection_obj:
                self.mongo_obj = ConnectionObj.mongo_connection_obj = MongoConnect()
        except Exception as e:
            logger.exception("Exception in the data utility definition" + str(e))

    def get_category_list(self, input_json):
        """Return all categories of a project, shaped for the listing table.

        :param input_json: dict containing at least ``project_id``
        :return: {"status": ..., "data": {"headerContent": [...], "bodyContent": [...]}}
        """
        final_json = {"status": StatusMessages.FAILED,
                      "data": dict(headerContent=StaticJsons.CATEGORY_HEADERCONTENT, bodyContent=list())}
        logger.debug("Inside get_category_list definition")
        try:
            records = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.category_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'project_id': input_json["project_id"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            # BUG FIX: field references inside $project must be
                            # '$'-prefixed; the bare strings previously emitted
                            # the literal text instead of the stored values.
                            'category_id': '$category_id',
                            'category_name': '$CategoryName',
                            "sub_categories": '$SubCategories',
                            'description': '$Description'
                        }
                    }
                ]))
            # Flatten sub-category objects into a comma-separated name string
            for each_record in records:
                if "sub_categories" in each_record:
                    sub_categories = [item["sub_CategoryName"] for item in each_record["sub_categories"]]
                    each_record["sub_categories"] = ",".join(sub_categories)
            final_json["data"]["bodyContent"] = deepcopy(records)
            final_json["status"] = StatusMessages.SUCCESS
        except Exception as e:
            logger.debug("Exception occurred while fetching category list data" + str(e))
        return final_json

    def fetch_category_data(self, input_json):
        """Fetch one category (by ``project_id`` + ``category_id``) for editing."""
        final_json = {"status": StatusMessages.FAILED,
                      "data": dict()}
        logger.debug("Inside fetch_category_data definition")
        try:
            # NOTE(review): unlike get_category_list the result is used as a
            # single dict-like document — confirm MongoConnect.aggregate
            # actually returns one here rather than a cursor.
            records = self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.category_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'project_id': input_json["project_id"],
                            "category_id": input_json["category_id"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            # BUG FIX: '$CategoryName' (a field reference), not
                            # the literal string 'CategoryName'.
                            'category_name': '$CategoryName',
                            "sub_categories": '$SubCategories',
                            'description': '$Description'
                        }
                    }
                ])
            # Rename sub_CategoryName -> label for the UI dropdown widget
            if len(records["sub_categories"]):
                for data in records["sub_categories"]:
                    data['label'] = data.pop("sub_CategoryName")
            final_json["data"] = deepcopy(records)
            final_json["status"] = StatusMessages.SUCCESS
        except Exception as e:
            logger.debug("Exception occurred while fetching category data" + str(e))
        return final_json

    def save_category_data(self, input_json):
        """Create ("save") or update ("edit") a category document.

        New sub-categories (those with an empty ``sub_category_id``) are
        persisted first so the category stores their generated ids.
        """
        final_json = dict(status=StatusMessages.FAILED, message=StatusMessages.CATEGORY_DATA)
        logger.debug("Inside save_category_data definition")
        try:
            for data in input_json["sub_categories"]:
                data['sub_CategoryName'] = data.pop("label")
                # Empty id marks a sub-category that does not exist yet.
                # (The original membership test [str(), ""] was just ["", ""].)
                if data["sub_category_id"] == "":
                    sub_category_id = self.save_subCategory_data(label_name=data["sub_CategoryName"],
                                                                 project_id=input_json["project_id"])
                    if not sub_category_id:
                        return final_json
                    data["sub_category_id"] = sub_category_id
            if input_json["type"] == "save":
                insert_json = dict(CategoryName=input_json["category_name"], Description=input_json["description"],
                                   SubCategories=input_json["sub_categories"],
                                   category_id="category_" + self.new_id.get_next_id("category"),
                                   project_id=input_json["project_id"],
                                   created_on=time.time(),
                                   created_by=self.new_id.get_user_id(),
                                   last_updated_on=time.time(),
                                   last_updated_by=self.new_id.get_user_id()
                                   )
                self.mongo_obj.insert_one(database_name=DBMapping.support_lens_configuration,
                                          collection_name=DBMapping.category_configuration,
                                          json_data=insert_json)
            elif input_json["type"] == "edit":
                # NOTE(review): created_on/created_by are rewritten on edit too;
                # kept because the update runs with upsert=True, but consider
                # preserving the original creation metadata.
                update_json = dict(CategoryName=input_json["category_name"], Description=input_json["description"],
                                   SubCategories=input_json["sub_categories"],
                                   category_id=input_json["category_id"],
                                   created_on=time.time(),
                                   project_id=input_json["project_id"],
                                   created_by=self.new_id.get_user_id(),
                                   last_updated_on=time.time(),
                                   last_updated_by=self.new_id.get_user_id()
                                   )
                query_json = dict(project_id=input_json["project_id"], category_id=input_json["category_id"])
                self.mongo_obj.update_one(db_name=DBMapping.support_lens_configuration,
                                          collection_name=DBMapping.category_configuration,
                                          set_json=update_json, query=query_json, upsert=True)
            final_json = dict(status=StatusMessages.SUCCESS, message="Category saved Successfully")
        except Exception as e:
            logger.debug("Exception occurred while saving category data" + str(e))
        return final_json

    def save_subCategory_data(self, label_name, project_id):
        """Insert a new sub-category and return its generated id (False on error)."""
        try:
            insert_json = dict(sub_CategoryName=label_name,
                               sub_category_id="sub_category_" + self.new_id.get_next_id("sub_category"),
                               project_id=project_id,
                               created_on=time.time(),
                               created_by=self.new_id.get_user_id())
            self.mongo_obj.insert_one(database_name=DBMapping.support_lens_configuration,
                                      collection_name=DBMapping.subcategory_configuration,
                                      json_data=insert_json)
            return insert_json["sub_category_id"]
        except Exception as e:
            logger.debug("Exception occurred while saving save_subCategory_data" + str(e))
            return False

    def delete_category_data(self, input_json):
        """Delete one category identified by ``project_id`` + ``category_id``."""
        final_json = dict(status=StatusMessages.FAILED, message=StatusMessages.CATEGORY_DELETE)
        try:
            query_json = dict(project_id=input_json["project_id"], category_id=input_json["category_id"])
            self.mongo_obj.delete_one_record(db_name=DBMapping.support_lens_configuration,
                                             collection_name=DBMapping.category_configuration,
                                             query_json=query_json)
            final_json = dict(status=StatusMessages.SUCCESS, message="Category deleted Successfully")
        except Exception as e:
            logger.debug("Exception occurred while deleting category data" + str(e))
        return final_json
from flask import Blueprint, request
from scripts.config.app_constants import Endpoints, StatusMessages
from scripts.core.handler.supportlens_handler import SupportLensHandler
from scripts.logging.logger import logger
from scripts.utils.AESEnc import apply_encryption
# Flask blueprint exposing the support-lens REST endpoints
support_lens_blueprint = Blueprint("support_lens_blueprint", __name__)
# Single handler instance shared by all routes below
handler_obj = SupportLensHandler()
@support_lens_blueprint.route(Endpoints.category_list, methods=['POST'])
@apply_encryption
def get_category_list():
    """Return the category listing for the project in the request payload."""
    response = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_LIST}
    try:
        # apply_encryption presumably places the decrypted payload on
        # request.data — confirm against the decorator implementation.
        payload = request.data
        # payload = request.get_json()
        response = handler_obj.get_category_list(input_json=payload)
    except Exception as err:
        logger.exception("Exception -> %s" % str(err))
    return response
@support_lens_blueprint.route(Endpoints.category_save, methods=['POST'])
@apply_encryption
def save_category_data():
    """Create a new category from the request payload."""
    response = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_DATA}
    try:
        payload = request.data
        # payload = request.get_json()
        # Tag the payload so the handler takes the insert branch
        payload.update(type="save")
        response = handler_obj.save_category_data(input_json=payload)
    except Exception as err:
        logger.exception("Exception -> %s" % str(err))
    return response
@support_lens_blueprint.route(Endpoints.category_edit, methods=['POST'])
@apply_encryption
def edit_category_data():
    """Update an existing category from the request payload."""
    response = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_DATA}
    try:
        payload = request.data
        # payload = request.get_json()
        # Tag the payload so the handler takes the update branch
        payload.update(type="edit")
        response = handler_obj.save_category_data(input_json=payload)
    except Exception as err:
        logger.exception("Exception -> %s" % str(err))
    return response
@support_lens_blueprint.route(Endpoints.category_fetch, methods=['POST'])
@apply_encryption
def fetch_category_data():
    """Fetch one category's editable data."""
    response = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_FETCH}
    try:
        payload = request.data
        # payload = request.get_json()
        response = handler_obj.fetch_category_data(input_json=payload)
    except Exception as err:
        logger.exception("Exception -> %s" % str(err))
    return response
# BUG FIX: this route originally redefined ``fetch_category_data`` on the
# same ``category_fetch`` rule, which makes Flask raise a "view function
# mapping is overwriting an existing endpoint" error at import time and left
# the delete handler unreachable. It is actually the delete endpoint, so it
# now has its own name, its own route, and the CATEGORY_DELETE message
# (instead of CATEGORY_DATA).
# TODO: add a ``category_delete`` constant to ``Endpoints`` and use it here.
@support_lens_blueprint.route(Endpoints.base_url + "/category/delete", methods=['POST'])
@apply_encryption
def delete_category_data():
    """Delete one category identified by the request payload."""
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_DELETE}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.delete_category_data(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json
# --- All errors from this code base belong to the code series 0001-1000 --- #
IL0001 = 'Error Code IL0001: Login timeout, Please Login again'
IL0002 = 'Error Code IL0002: ILens faced an issue when processing.' \
         ' Please contact the administrator to resolve the issue.'
IL0003 = 'Error Code IL0003: Unsupported method was requested to server'
# --- All broader exceptions codes belong to the code series 1001-2000 --- #
# Placeholder: no broad exception codes defined yet
IL1001 = ''
# --- All errors from dependent libraries belong to their respective code series --- #
# All error in the MONGO... series are exception codes for errors related to MongoDB
MONGO001 = "Error Code MONGO001: Server was unable to establish connection with MongoDB"
MONGO002 = "Error Code MONGO002: Server faced a problem when inserting document(s) into MongoDB"
MONGO003 = "Error Code MONGO003: Server faced a problem to find the document(s) with the given condition"
MONGO004 = "Error Code MONGO004: Server faced a problem to delete the document(s) with the given condition"
MONGO005 = "Error Code MONGO005: Server faced a problem to update the document(s) with the given condition and data"
MONGO006 = "Error Code MONGO006: Server faced a problem when aggregating the data"
MONGO007 = "Error Code MONGO007: Server faced a problem when closing MongoDB connection"
MONGO008 = "Error Code MONGO008: Found an existing record with the same ID in MongoDB"
MONGO009 = "Error Code MONGO009: Server faced a problem when fetching distinct documents from MongoDB"
MONGO010 = "Error Code MONGO010: Server faced a problem when performing a search and replace in MongoDB"
MONGO011 = "Error Code MONGO011: Server faced a problem when de-serializing MongoDB object"
# All error in the DCK... series are exception codes for errors related to Docker
DCK001 = "Error Code DCK001: Invalid docker client identifier"
class ILensException(Exception):
    """Root of the iLens application exception hierarchy."""
    pass


class MongoException(ILensException):
    """Base class for all MongoDB related errors."""
    pass


class MongoConnectionException(MongoException):
    """Raised when a MongoDB connection cannot be established."""
    pass


class MongoQueryException(MongoException):
    """Base class for errors raised while executing a Mongo query."""
    pass


class MongoEncryptionException(MongoException):
    """Base class for errors in the field-encryption layer."""
    pass


class MongoRecordInsertionException(MongoQueryException):
    """Raised when inserting document(s) fails."""
    pass


class MongoFindException(MongoQueryException):
    """Raised when a find operation fails."""
    pass


class MongoDeleteException(MongoQueryException):
    """Raised when a delete operation fails."""
    pass


class MongoUpdateException(MongoQueryException):
    """Raised when an update operation fails."""
    pass


class MongoUnknownDatatypeException(MongoEncryptionException):
    """Raised when the encryption layer meets an unsupported datatype."""
    pass


# NOTE(review): class name is missing an 'n' ("Distinct"); kept as-is for
# backward compatibility with existing raisers/handlers.
class MongoDistictQueryException(MongoException):
    """Raised when a distinct query fails."""
    pass


class MongoFindAndReplaceException(MongoException):
    """Raised when a find-and-replace operation fails."""
    pass


class MongoObjectDeserializationException(MongoException):
    """Raised when a MongoDB object cannot be de-serialized."""
    pass


class DeviceManagerException(ILensException):
    """Base class for device-manager errors."""
    pass


class DeviceNameAlreadyExistsException(DeviceManagerException):
    """Raised when a device name collides with an existing device."""
    pass
import logging
import os
from logging.handlers import RotatingFileHandler
from scripts.config import app_configuration
# Logging settings sourced from the application configuration module
_log_file_name__ = str(app_configuration.LOG_FILE_NAME)
__log_path__ = str(app_configuration.LOG_PATH)
__log_level__ = str(app_configuration.LOG_LEVEL)
__handler_type__ = str(app_configuration.LOG_HANDLER)
__max_bytes__ = int(app_configuration.LOG_MAX_BYTES)
__backup_count__ = int(app_configuration.LOG_BACKUP_COUNT)
# Log file path without the '.log' extension (appended in get_logger)
complete_log_path = os.path.join(__log_path__, _log_file_name__)
# Create the log directory on first start
if not os.path.isdir(__log_path__):
    os.makedirs(__log_path__)
# Register a custom TRACE level 5 below DEBUG; logging.trace (a monkey-patched
# module attribute) holds its numeric value for ILensLogger.trace below
logging.trace = logging.DEBUG - 5
logging.addLevelName(logging.DEBUG - 5, 'TRACE')
class ILensLogger(logging.getLoggerClass()):
    """Logger subclass adding a TRACE level below DEBUG."""

    def __init__(self, name):
        super().__init__(name)

    def trace(self, msg, *args, **kwargs):
        # logging.trace is the custom level number registered at module load
        if self.isEnabledFor(logging.trace):
            self._log(logging.trace, msg, args, **kwargs)
def get_logger(log_file_name=complete_log_path, log_level=__log_level__, time_format="%Y-%m-%d %H:%M:%S",
               handler_type=__handler_type__, max_bytes=__max_bytes__, backup_count=__backup_count__):
    """
    Creates a rotating log

    :param log_file_name: log file path without the '.log' extension
    :param log_level: level name such as DEBUG/INFO (case-insensitive)
    :param time_format: strftime format for record timestamps
    :param handler_type: 'rotating_file_handler' for size-based rotation,
        anything else for a plain FileHandler
    :param max_bytes: rotation threshold in bytes (rotating handler only)
    :param backup_count: number of rotated files to keep
    :return: the configured logger instance
    """
    # BUG FIX: os.path.join() with a single argument was a no-op; plain
    # concatenation expresses the intent directly.
    log_file = log_file_name + '.log'
    logging.setLoggerClass(ILensLogger)
    __logger__ = logging.getLogger(log_file_name)
    __logger__.setLevel(log_level.strip().upper())
    debug_formatter = '%(asctime)s - %(levelname)-6s - %(name)s - [%(threadName)5s:%(filename)5s:' \
                      '%(funcName)5s():''%(lineno)s] - %(message)s'
    formatter_string = '%(asctime)s - %(levelname)-6s - %(name)s - %(levelname)3s - %(message)s'
    # BUG FIX: the original compared log_level.strip().upper() == log_level,
    # which picked the verbose format for ANY already-uppercase level name.
    # The verbose (file/line) format is intended for DEBUG only.
    if log_level.strip().upper() == "DEBUG":
        formatter_string = debug_formatter
    formatter = logging.Formatter(formatter_string, time_format)
    # Drop handlers from a previous call so records are not duplicated.
    # (A console handler used to be built here but was never attached.)
    if __logger__.hasHandlers():
        __logger__.handlers.clear()
    if str(handler_type).lower() == "rotating_file_handler":
        # Size-based rotating file handler
        handler = RotatingFileHandler(log_file, maxBytes=max_bytes, backupCount=backup_count)
    else:
        # Plain (non-rotating) file handler
        handler = logging.FileHandler(log_file)
    handler.setFormatter(formatter)
    __logger__.addHandler(handler)
    return __logger__


logger = get_logger()
import base64
import json
import uuid
import hashlib
from functools import wraps
from flask import request, make_response, jsonify, session
from Cryptodome import Random
from Cryptodome.Cipher import AES
# from Crypto import Random
# from Crypto.Cipher import AES
from datetime import datetime
from scripts.config import app_configuration as config, app_constants, app_configuration
# Maximum cookie age in minutes, from the service configuration
max_age_in_mins = config.cookie_max_age


class AESCipher(object):
    """
    A classical AES Cipher. Can use any size of data and any size of password thanks to padding.
    Also ensure the coherence and the type of the data with a unicode to byte converter.
    """

    def __init__(self, key):
        # AES block size in bytes; also the pad modulus used by _pad/_unpad
        self.bs = 16
        # NOTE(review): the key is used as-is; AES requires 16/24/32-byte
        # keys, which is not validated here — confirm callers supply one.
        self.key = AESCipher.str_to_bytes(key)

    @staticmethod
    def str_to_bytes(data):
        # Return data UTF-8 encoded when it is a str; pass bytes through.
        u_type = type(b''.decode('utf8'))
        if isinstance(data, u_type):
            return data.encode('utf8')
        return data

    def _pad(self, s):
        # PKCS#7-style padding: append N bytes each of value N, where N is
        # the number of bytes needed to reach a multiple of self.bs.
        return s + (self.bs - len(s) % self.bs) * AESCipher.str_to_bytes(chr(self.bs - len(s) % self.bs))

    @staticmethod
    def _unpad(s):
        # Strip the padding: the last byte encodes the pad length.
        return s[:-ord(s[len(s) - 1:])]

    def encrypt(self, raw):
        # Encrypt with AES-CBC under a fresh random IV; the IV is prepended
        # to the ciphertext and the whole blob is base64-encoded.
        raw = self._pad(AESCipher.str_to_bytes(raw))
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return base64.b64encode(iv + cipher.encrypt(raw)).decode('utf-8')

    def decrypt(self, enc):
        # Reverse of encrypt(): base64-decode, split off the leading IV,
        # decrypt the remainder and strip the padding.
        enc = base64.b64decode(enc)
        iv = enc[:AES.block_size]
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        data = self._unpad(cipher.decrypt(enc[AES.block_size:]))
        return data.decode('utf-8')
def create_cookie(request_json, request, user_id=None):
    """
    Build an encrypted session cookie binding the user to the request.

    The plaintext layout is "user_id^client_ip^user_agent^issued_at",
    encrypted with the service's private cookie key.
    :param request_json: request payload (kept for interface compatibility; unused here)
    :param request: flask request the cookie is issued for
    :param user_id: explicit user id; falls back to the user_id cookie
    :return: (True, encrypted cookie string) on success, (False, "") on failure
    """
    status = False
    cookie_encoded_str = ""
    try:
        if user_id is None:
            user_id = request.cookies.get("user_id")
        # First hop of X-Forwarded-For is the originating client; strip any port.
        # NOTE(review): a missing header makes .get() return None and raises here.
        client_ip = request.headers.get('X-Forwarded-For').split(',')[0].split(':')[0]
        cookie_string = (
            user_id + '^' +
            client_ip + "^" +
            request.headers.get('User-Agent') + "^" + datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ'))
        cookie_encoded_str = AESCipher(app_constants.KEYS.cookie_encryption_private_key).encrypt(cookie_string)
        return True, cookie_encoded_str
    except Exception as e:
        # Fix: log the failure instead of swallowing it silently, matching
        # the error reporting style of validate_cookie.
        print("Error : {}".format(str(e)))
        return status, cookie_encoded_str
def validate_cookie(cookie_enc_str, request):
    """
    Validate an encrypted session cookie against the current request.

    The cookie decrypts to "user_id^client_ip^user_agent^issued_at". The
    user id and user agent must match the request; the client IP is only
    enforced when app_configuration.ip_check is on (IP validation was made
    optional as a hotfix for client-network session logouts); and the
    cookie must be younger than the configured max age.
    :param cookie_enc_str: encrypted session cookie value from the client
    :param request: flask request being validated
    :return: (True, decrypted field list) on success, (False, None) otherwise
    """
    status = False
    encrypted_str = None
    # IP check passes by default; only the configured check can fail it.
    ip_check = True
    user_id = request.cookies['user_id']
    try:
        headers = request.headers
        decrypted_string = AESCipher(app_constants.KEYS.cookie_encryption_private_key).decrypt(cookie_enc_str)
        decrypted_data = decrypted_string.split('^')
        # First hop of X-Forwarded-For is the originating client; strip any port.
        client_ip = headers.get('X-Forwarded-For').split(',')[0].split(':')[0]
        user_agent = request.headers.get('User-Agent')
        time_diff = (datetime.now() - datetime.strptime(decrypted_data[3], "%Y-%m-%dT%H:%M:%SZ")).total_seconds()
        if time_diff > max_age_in_mins * 60:
            raise Exception("Session Expired")
        if decrypted_data[0] == user_id:
            user_check = True
        else:
            raise Exception('Invalid Cookie (User validation failed)')
        if app_configuration.ip_check:
            if decrypted_data[1] == client_ip:
                ip_check = True
            else:
                # Fix: this branch previously raised 'Agent validation failed',
                # masking IP mismatches as user-agent failures.
                raise Exception('Invalid Cookie (IP validation failed)')
        if decrypted_data[2] == user_agent:
            agent_check = True
        else:
            raise Exception('Invalid Cookie (Agent validation failed)')
        if user_check and ip_check and agent_check:
            encrypted_str = decrypted_data
            return True, encrypted_str
        else:
            raise Exception("Invalid cookie")
    except Exception as e:
        print("Error : {}".format(str(e)))
        return status, encrypted_str
def apply_encryption(func):
    """
    This function is to be use as a decorator only,
    This function works like a wrapper which will decrypt the data before sending to the function and also encrypt
    the data before returning.

    With security enabled, the request body is decrypted using an AES key
    derived from the first 16 chars of the session_id cookie, and the view's
    response is encrypted the same way. With security disabled, the body is
    parsed as plain JSON, hardening headers are added and the session cookie
    is re-issued (sliding expiry).
    :param func:
    :return: wrapper
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if config.SERVICE_ENABLE_SECURITY is True:
            # Fix: check the cookie exists before indexing it. Previously a
            # missing session_id cookie raised KeyError here instead of
            # returning the intended 401 response.
            if 'session_id' not in request.cookies:
                return make_response(json.dumps({"status": "session_invalid", "message": "Invalid Session"}), 401)
            session_id = str(request.cookies['session_id'])[:16]
            data = request.data
            data = data.decode()
            status, encoded_str = validate_cookie(request.cookies['session_id'], request)
            if status:
                request.data = json.loads(AESCipher(session_id).decrypt(data))
                response = func(*args, **kwargs)
                # Encrypted responses are returned as raw base64 strings.
                data = AESCipher(session_id).encrypt(json.dumps(response))
                return data
            else:
                return make_response(json.dumps({"status": "session_invalid", "message": "Invalid Session"}), 401)
        else:
            request.data = json.loads(request.data.decode())
            if 'session_id' in request.cookies:
                status, encoded_str = validate_cookie(request.cookies['session_id'], request)
            else:
                return make_response(json.dumps({"status": "session_invalid", "message": "Invalid Session"}), 401)
            if status:
                response = func(*args, **kwargs)
                # reset_password responses pass through untouched.
                if str(request.url_rule).endswith('reset_password'):
                    return response
                else:
                    response = make_response(json.dumps(response), 200)
                    # Standard hardening headers on every JSON response.
                    response.headers['X-Content-Type-Options'] = 'nosniff'
                    response.headers['X-Frame-Options'] = 'SAMEORIGIN'
                    response.headers['Cache-Control'] = 'no-store'
                    # Re-issue the cookie so its embedded timestamp slides forward.
                    cookie_status, secure_cookie = create_cookie(request.data, request)
                    if cookie_status:
                        response.set_cookie('session_id', secure_cookie, httponly=app_configuration.http_flag)
                        return response
                    else:
                        raise Exception("Failed to set cookie")
            else:
                return make_response(json.dumps({"status": "session_invalid", "message": "Invalid Session"}), 401)
    return wrapper
def login_wrap(func):
    """
    This function is used as a decorator only for login services

    Wraps a login view: wraps its return value in a 200 response and attaches
    the server-side session id as the session_id cookie with the configured
    max age. When security is disabled, the raw request body is parsed as
    JSON before the view runs.
    :param func:
    :return: wrapper
    """
    @wraps(func)
    def wrap_login(*args, **kwargs):
        # The session id was placed into the server-side session by the
        # login flow before this decorator runs.
        session_id = str(session["session_id"])
        if config.SERVICE_ENABLE_SECURITY is not True:
            # Plain-JSON mode: decode the body for the view.
            request.data = json.loads(request.data.decode())
        resp = make_response(func(*args, **kwargs), 200)
        resp.set_cookie("session_id", session_id, max_age=max_age_in_mins * 60)
        return resp
    return wrap_login
class CipherEncryption(object):
    """
    AES-CBC cipher whose key is the SHA-256 digest of a passphrase.

    encrypt() returns base64 bytes with the IV prepended; decrypt()
    reverses the process and strips the block padding.
    """
    def __init__(self, key):
        self.bs = AES.block_size
        # Any-length passphrase becomes a fixed 32-byte AES key via SHA-256.
        self.key = hashlib.sha256(key.encode()).digest()
    def _pad(self, s):
        # Pad up to a block boundary; every pad char carries the pad length.
        pad_len = self.bs - len(s) % self.bs
        return s + pad_len * chr(pad_len)
    @staticmethod
    def _unpad(s):
        # The final byte encodes how many padding bytes to strip.
        return s[:-ord(s[len(s) - 1:])]
    def encrypt(self, raw):
        padded = self._pad(raw)
        iv = Random.new().read(AES.block_size)
        aes = AES.new(self.key, AES.MODE_CBC, iv)
        # Ship the IV alongside the ciphertext so decrypt can recover it.
        return base64.b64encode(iv + aes.encrypt(padded.encode()))
    def decrypt(self, enc):
        blob = base64.b64decode(enc)
        iv = blob[:AES.block_size]
        aes = AES.new(self.key, AES.MODE_CBC, iv)
        return self._unpad(aes.decrypt(blob[AES.block_size:])).decode('utf-8')
from flask import request
from scripts.config import app_constants
from scripts.utils.mongo_utility import MongoConnect
from scripts.config.db_connection_obj import ConnectionObj
# Lazily create the process-wide Mongo connection on first import.
if ConnectionObj.mongo_connection_obj is None:
    ConnectionObj.mongo_connection_obj = MongoConnect()
# Collection and database names used by the id-sequence helpers below.
# NOTE(review): assumes DBMapping.unique_id names a collection — confirm in app_constants.
my_col = app_constants.DBMapping.unique_id
metadata = app_constants.DBMapping.mongo_db_name
class GetNewId:
    """Helpers for allocating sequential string ids and resolving the caller's user id."""

    @staticmethod
    def get_next_id(param1):
        """
        Return the next sequential id (as a string) for the counter named
        ``param1``, creating the counter at "100" on first use.

        NOTE(review): the read-then-update is not atomic, so concurrent
        callers can be handed the same id; a find_one_and_update with $inc
        would be safer if the mongo utility exposes it.
        :param param1: counter key
        :return: next id as a string
        """
        my_query = {"key": param1}
        my_doc = ConnectionObj.mongo_connection_obj.find_one(metadata, my_col, my_query)
        if not my_doc:
            my_dict = {"key": param1, "id": "100"}
            ConnectionObj.mongo_connection_obj.database_insertion(metadata, my_col, my_dict)
            return my_dict['id']
        # Fix: the previous version wrapped this single document in a list,
        # looped over it, and computed the incremented id twice.
        current = ConnectionObj.mongo_connection_obj.find_one(metadata, my_col, my_query, {"_id": 0, "id": 1})
        count_value = str(int(current['id']) + 1)
        ConnectionObj.mongo_connection_obj.update_one(metadata, my_col, my_query, {"$set": {"id": count_value}})
        return count_value

    @staticmethod
    def get_user_id(input_data=None):
        """
        Resolve the user id from the user_id cookie, falling back to the
        request payload.
        :param input_data: optional dict that may carry a "user_id" key
        :return: {"status": "success", "user_id": ...} or a failure dict
        """
        if "user_id" in request.cookies:
            # Cookie value is used as-is; the encrypted-cookie path is disabled.
            user_id = request.cookies.get("user_id")
            return {"status": "success", "user_id": user_id}
        # Fix: guard against input_data being None — previously
        # `"user_id" in None` raised TypeError instead of returning a
        # clean failure response.
        if input_data and "user_id" in input_data and input_data["user_id"] != "":
            return {"status": "success", "user_id": input_data["user_id"]}
        return {"status": "failed", "message": "user_id is missing from input"}
\ No newline at end of file
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment