harshavardhan.c / support_lens · Commits · 1c975cab

Commit 1c975cab, authored Dec 31, 2020 by harshavardhan.c
    Added new services for the case configuration
Parent: 6d69a778

Showing 12 changed files with 958 additions and 216 deletions (+958 −216)
app.py                                                      +20  −0
conf/app/application.conf                                   +2   −78
scripts/config/app_configuration.py                         +1   −94
scripts/config/app_constants.py                             +173 −4
scripts/core/handler/category_configuration_handler.py      +78  −30
scripts/core/handler/resolver_setup_handler.py              +211 −0
scripts/core/handler/supportcase_setup_handler.py           +306 −0
scripts/core/services/category_configuration.py             +9   −9
scripts/core/services/resolver_setup_configuration.py       +89  −0
scripts/core/services/supportcase_setup_configuration.py    +62  −0
scripts/utils/get_new_id.py                                 +1   −1
scripts/utils/mongo_utility.py                              +6   −0
app.py · new file (0 → 100644) · +20 −0

from flask import Flask
from scripts.config import app_configuration as conf
from scripts.core.services.category_configuration import category_configuration
from scripts.core.services.resolver_setup_configuration import resolver_configuration
from scripts.core.services.supportcase_setup_configuration import supportcase_setup_configuration
from scripts.logging.logger import logger

app = Flask(__name__)
app.register_blueprint(category_configuration)
app.register_blueprint(resolver_configuration)
app.register_blueprint(supportcase_setup_configuration)

if __name__ == "__main__":
    logger.warning("This is debug only Runtime. Run wsgi.py in production environment.\n"
                   "For more information, refer Read Me.\nFor help, contact developer. Don't screw up!")
    logger.info("Debug Runtime Starting......")
    app.run(host=conf.SERVICE_HOST, port=conf.SERVICE_PORT, debug=True, threaded=True, use_reloader=False)
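The warning above points at wsgi.py for production use. That file is not part of this commit, so the following is only a minimal sketch of what such an entrypoint could look like, assuming gunicorn as the WSGI server and reusing the workers/threads values that application.conf already defines; treat every name here as illustrative.

# Hypothetical wsgi.py sketch (not in this commit): expose the Flask app
# to a production WSGI server instead of calling app.run().
from app import app as application  # gunicorn resolves "wsgi:application"

# Example launch, reusing the [service] workers/threads from application.conf:
#   gunicorn --workers 3 --threads 6 --bind 0.0.0.0:9090 wsgi:application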
conf/app/application.conf · +2 −78

[service]
name = ilens
host = 0.0.0.0
-port = 8585
+port = 9090
interface = eth0
SECRET_KEY = KLKey
enable_security = False
...
@@ -14,6 +14,7 @@ reset_time_interval = 0.5
apply_processor_count = False
workers = 3
threads = 6
+files =
ip_check = False
[log]
...
@@ -33,81 +34,4 @@ authSource=
authMechanism =
mongo_constants_file_path = conf/mongo_encryption_constants.json
-[kairos_db]
-url = http://ilens_kairos:8080
-[system_login]
-domain_list = ["knowledgelens.com"]
-[csv_conf]
-csv_path = Log/csv/
-upload_path = Log/UPLOAD
-[schedule_rule_engine]
-url = http://localhost:9997/create_job
-[profile_pic_path]
-base_path = templates/profile_pic/
-[email_default_baseurl]
-base_url = https://app.ilens.io/cloud/send_mail
-[ai_rules]
-enabled = true
-docker_endpoint = tcp://localhost:4243
-[flow_model]
-container_url = http://localhost:8180
-[IMAGE]
-path = images/
-[LICENSE_PATH]
-path = license/
-[pipeline_internal]
-mqtt_broker_host = 192.168.0.220
-mqtt_broker_port = 1883
-mqtt_broker_ssl = false
-mqtt_broker_conn_type = tcp
-mqtt_broker_ws_port = 8083
-node_intermediate = mqtt
-queue_host = 192.168.0.220
-queue_port = 9092
-[channel_pp_debug_node]
-host = 192.168.0.220
-port = 1883
-topic = ilens/pipeline/debug
-user_name =
-password =
-[AGENT]
-manager_url = http://192.168.0.220/ilens_api
-[REDIS]
-host = 192.168.0.220
-port = 6379
-key_expiry = 100
-rules_db = 0
-alarms_db = 1
-live_tags_db = 4
-audit_db = 6
-audit_queue = audit
-[ILENS_VERSION]
-version = v5.1
-[DATA_PROCESSOR]
-host = localhost
-connection = tcp
-port = 1883
-topic = ilens/monitor/live/device_monitor_alarms
-[LICENSE_SERVER]
-host = 192.168.0.220
-port = 9816
scripts/config/app_configuration.py · +1 −94

import configparser
-import json
import os

# read config file based on already set environment variable : APP_ENV
...
@@ -39,6 +38,7 @@ APPLY_PROCESSOR_COUNT = config.getboolean("service", "apply_processor_count", fa
workers = config.getint("service", "workers", fallback=1)
threads = config.getint("service", "threads", fallback=1)
ip_check = config.getboolean("service", "ip_check", fallback=False)
+FILES_SAVE_PATH = config.get("service", "files", fallback="files")
cookie_max_age = config.getint("service", "cookie_max_age_in_mins", fallback=60)
...
@@ -70,96 +70,3 @@ MONGO_AUTHSOURCE = config["mongo_db"]["authSource"]
MONGO_AUTHMECHANISM = config["mongo_db"]["authMechanism"]
encryption_constants_file_path = config["mongo_db"]["mongo_constants_file_path"]
-"""
-Kairos Info
-"""
-KAIROS_DB_URL = config["kairos_db"]["url"]
-"""
-Additional info
-"""
-SYSTEM_LOGIN_DOMAIN_DOMAIN_LIST = json.loads(config["system_login"]["domain_list"])
-CSV_PATH = config["csv_conf"]["csv_path"]
-UPLOAD_CSV_PATH = config["csv_conf"]["upload_path"]
-PROFILE_PIC_PATH = config["profile_pic_path"]["base_path"]
-"""
-Manual Entry conf
-"""
-SCHEDULER_BASE_URL = config["schedule_rule_engine"]["url"]
-"""
-Upload Parser
-"""
-# upload_parser = config["upload_parser"]
-"""
-Default email sender
-"""
-default_email_url = config["email_default_baseurl"]["base_url"]
-"""
-AI Rule : Auto Container Deployment Service
-"""
-AI_RULE_STATUS = config["ai_rules"]["enabled"]
-if AI_RULE_STATUS.lower() == "true":
-    AI_RULE_DOCKER_ENDPOINT = config["ai_rules"]["docker_endpoint"]
-else:
-    AI_RULE_DOCKER_ENDPOINT = "localhost"
-CONTAINER_URL = config["flow_model"]["container_url"]
-IMAGE_PATH = config.get('IMAGE', 'path', fallback="images/")
-LICENSE_PATH = config.get('LICENSE_PATH', 'path', fallback="license/")
-PIPELINE_INTERNAL_SECTION = 'pipeline_internal'
-PIPELINE_INTERNAL_CONFIGURATION = {
-    "mqtt_broker_host": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_host', fallback='localhost'),
-    "mqtt_broker_port": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_port', fallback='1883'),
-    "mqtt_broker_ssl": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_ssl', fallback='false'),
-    "mqtt_broker_conn_type": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_conn_type', fallback='tcp'),
-    "mqtt_broker_ws_port": config.get(PIPELINE_INTERNAL_SECTION, 'mqtt_broker_ws_port', fallback='8083'),
-    "kafka_broker": config.get(PIPELINE_INTERNAL_SECTION, 'queue_host', fallback='localhost'),
-    "kafka_port": config.get(PIPELINE_INTERNAL_SECTION, 'queue_port', fallback='localhost'),
-    "broker_ssl_path": config.get(PIPELINE_INTERNAL_SECTION, 'broker_ssl_path', fallback='/opt/ssl'),
-    "node_intermediate": 'kafka' if config.get(PIPELINE_INTERNAL_SECTION, 'node_intermediate',
-                                               fallback='kafka') == 'queue' else "mqtt"
-}
-CHANNEL_PIPELINE_CONFIG = config['channel_pp_debug_node']
-MQTT_BROKER_HOST = config['pipeline_internal']["mqtt_broker_host"]
-MQTT_BROKER_PORT = config['pipeline_internal']["mqtt_broker_port"]
-MQTT_BROKER_SSL = config['pipeline_internal']["mqtt_broker_ssl"]
-MQTT_BROKER_CONN_TYPE = config['pipeline_internal']["mqtt_broker_conn_type"]
-MQTT_BROKER_WS_PORT = config['pipeline_internal']["mqtt_broker_ws_port"]
-BROKER_SSL_PATH = PIPELINE_INTERNAL_CONFIGURATION.get("broker_ssl_path", '/opt/ssl')
-MANAGER_URL = config["AGENT"]["manager_url"]
-DEVICE_MODE = config.get('AGENT', 'device_mode', fallback='agent')
-SSL_CERT_PATH = config.get('AGENT', 'ssl_base_path', fallback='/opt/ssl')
-TIME_SYNC_URL = config.get('AGENT', 'time_sync_url', fallback=None)
-TIME_SYNC_URL_CERT_PATH = config.get('AGENT', 'cert_path', fallback=False)
-# Redis Details
-redis_host = str(config["REDIS"]["host"])
-redis_port = int(config["REDIS"]["port"])
-key_expiry_in_secs = int(config["REDIS"]["key_expiry"])
-rules_redis_db = int(config["REDIS"]["rules_db"])
-alarms_redis_db = int(config["REDIS"]["alarms_db"])
-live_tags_db = int(config["REDIS"]["live_tags_db"])
-audit_db = int(config["REDIS"]["audit_db"])
-audit_queue = str(config["REDIS"]["audit_queue"])
-#iLens version
-version = str(config["ILENS_VERSION"]["version"])
-# Data Processor configs
-data_processor_host = config.get('DATA_PROCESSOR', 'host', fallback=None)
-data_processor_conn_type = config.get('DATA_PROCESSOR', 'connection', fallback='tcp')
-data_processor_ssl = bool(config.get('DATA_PROCESSOR', 'ssl_enabled', fallback=False))
-data_processor_port = int(config.get('DATA_PROCESSOR', 'port', fallback=1883))
-data_processor_topic = str(config.get('DATA_PROCESSOR', 'topic', fallback=None))
-LICENSE_SERVER = config.get('LICENSE_SERVER', 'host', fallback="localhost")
-LICENSE_PORT = config.get('LICENSE_SERVER', 'port', fallback=9816)
\ No newline at end of file
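The single line this commit adds here, FILES_SAVE_PATH, relies on configparser's fallback only when the key is absent. The `files =` entry added to application.conf is present but empty, so the value read at runtime is an empty string rather than the "files" fallback. A small standalone check of that configparser behaviour, independent of the repo code:

import configparser

config = configparser.ConfigParser()
config.read_string("[service]\nfiles =\n")

# Key present but empty: the fallback is NOT used, the result is "".
print(repr(config.get("service", "files", fallback="files")))   # ''

# Key missing entirely: the fallback kicks in.
config.remove_option("service", "files")
print(repr(config.get("service", "files", fallback="files")))   # 'files'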
scripts/config/app_constants.py · +173 −4

class Endpoints:
    base_url = "/support_lens"
-    category_list = base_url + "/category/list"
-    category_save = base_url + "/category/save"
-    category_edit = base_url + "/category/edit"
-    category_fetch = base_url + "/category/fetch"
+    list_category = base_url + "/category/list"
+    save_category = base_url + "/category/save"
+    edit_category = base_url + "/category/edit"
+    fetch_category = base_url + "/category/fetch"
+    delete_category = base_url + "/category/delete"
+    fetch_resolver_meta = base_url + "/resolver/fetch_meta"
+    fetch_resolver_data = base_url + "/resolver/fetch"
+    save_resolver = base_url + "/resolver/save"
+    edit_resolver = base_url + "/resolver/edit"
+    delete_resolver = base_url + "/resolver/delete"
+    list_resolver = base_url + "/resolver/list"
+    save_support_case = base_url + "/support_case/create"
+    fetch_support_case = base_url + "/support_case/fetch"
+    delete_support_case = base_url + "/support_case/delete"
+    edit_support_case = base_url + "/support_case/edit"
+    get_support_case_table_details = base_url + "/support_case/fetch_table"


class DBMapping:
    # DBConstants
    support_lens_configuration = "supportlens_configuration"
+    ilens_configuration = "ilens_configuration"
    # CollectionConstants
    category_configuration = "category_configuration"
    subcategory_configuration = "subcategory_configuration"
+    resolver_configuration = "resolver_configuration"
+    ticket_configuration = "ticket_configuration"
+    site_conf = "site_conf"
+    user = "user"
+    unique_id = "unique_id"
+    customer_projects = "customer_projects"


+class CaseStatus:
+    OPEN = "open"
+    DELETE = "delete"


class StatusMessages:
...
@@ -25,6 +51,14 @@ class StatusMessages:
    CATEGORY_DELETE = "Failed to delete category data"
    CATEGORY_FETCH = "Failed to fetch category data"
+    RESOLVER_LIST = "Failed to fetch resolver list"
+    RESOLVER_DATA = "Failed to save resolver data"
+    RESOLVER_DELETE = "Failed to delete resolver data"
+    RESOLVER_FETCH = "Failed to fetch resolver data"
+    SUPPORTCASE_SAVE = "Failed to save support case details"
+    SUPPORTCASE_FETCH = "Failed to fetch support case details"


class StaticJsons:
    CATEGORY_HEADERCONTENT = [
...
@@ -41,3 +75,138 @@ class StaticJsons:
            "key": "sub_categories"
        }
    ]
+    RESOLEVER_HEADERCONTENT = [
+        {"label": "Resolver Group Name", "key": "resolver_group_name"},
+        {"label": "Description", "key": "description"},
+        {"label": "Category", "key": "category"},
+        {"label": "Sub Categories", "key": "sub_categories"}
+    ]
+    SUPPORTLENS_FETCHTABLE_HEADERCONTENT = [
+        {"value": "case_number", "label": "case Number", "enable_column_search": True, "header_type": "text"},
+        {"value": "subject", "label": "Subject", "enable_column_search": True, "header_type": "text"},
+        {"value": "customer_project_id", "label": "Customer Project ID", "enable_column_search": True,
+         "header_type": "text"},
+        {"value": "status", "label": "Status", "enable_column_search": True, "header_type": "select", "options": []},
+        {"value": "created_by", "label": "Created By", "enable_column_search": True, "header_type": "select",
+         "options": []},
+        {"value": "last_updated", "label": "Last Updated", "enable_column_search": True, "header_type": "date_range"}
+    ]
+    SUPPORTLENS_FETCHTABLE_TABLEACTIONS = {
+        "actions": [
+            {"action": "edit", "label": "Edit", "type": "edit", "icon-class": "fa fa-pencil"},
+            {"action": "view", "label": "View", "type": "view", "icon-class": "fa fa-eye"},
+            {"action": "delete", "label": "Delete", "type": "delete", "icon-class": "fa fa-trash"}
+        ],
+        "enableActions": True,
+        "externalActions": []
+    }


+class MongoEncryptionConstants:
+    # mongo exception codes
+    MONGO001 = "Server was unable to eestablish connection with MongoDB"
+    MONGO002 = "Server faced a problem when inserting document(s) into MongoDB"
+    MONGO003 = "Server faced a problem to find the document(s) with the given condition"
+    MONGO004 = "Server faced a problem to delete the document(s) with the given condition"
+    MONGO005 = "Server faced a problem to update the document(s) with the given condition and given data"
+    MONGO006 = "Server faced a problem when aggregating the data"
+    MONGO007 = "Server faced a problem when closing the connection with MongoDB"
+    # mongo encryption keys
+    key_encrypt_keys = "encrypt_keys"
+    key_exclude_encryption = "exclude_encryption"
+    product_encrypted = "product_encrypted"
+    max_docs_per_batch = 5
+    # cipher_key = "a985195aaa464e61"
+    # Product based configurable constants
+    cipher_key = {
+        'k': '-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEArVED5cr+tMtFtVmXl2O0cvQbEgoYSIFd8yvkmm6z7'
+             'XAdX6Eg\nYkKez0ydTl26KOdJ18A7Kn8etWGe8nTkSGheJl9rn/J+lE1zpo4Zg/T3wDnM8FM3\nyuM26vpIb+0oJmNc9'
+             'DkFXo4WtxRFZDytdETg/YyI+eJYDRDrZSrlqAzIDpAdLpv9\nUhsMhYQ+2n3PcauLeJb0dKPVTc6kSvGCs3LZ0WyTbRnQ'
+             'yJMCWnaxzpSIUcH7qaqO\nKC/fBCKsZmRjRNSmQ3gepz4VnQKyJCm7CJk+cQiQMQzrspRPvhmGouHZUM36KjsG\n6ylx2'
+             'Bu6OYy/HbrdRkJKNlv3u6BBL6Pn/ZJZGQIDAQABAoIBABI8eMhESnYbm1RI\nW8S8YzeIO1Pz13hDku7cArcEKG72kcSm'
+             '58knAN5HjbK59nVI1tJ6gc84JnNH1Qlm\nZsG+p49qkWC4S3zPxHg1MfaaPzpM6qUr4G4656OkV5xdTBDz+gshd9Dp6vZ'
+             'zDdUc\n9FRMTg8nqx79461mRxpzP8xloaQ0NcKBzFK9e3g/4i72LwgNP3E6xmESiu7goqJ1\nGOAI2mJie3TTY1z8sf4u'
+             'iSFLMaFrExkq4z4KkwS7qF2nOJxhv8H/g9TGPNWrnzAw\nyBHwINBoUaJwiOT51xxIDLgNQiNoIFuaMKVu2l+rWtoQWKG'
+             'iOnw1ZhYxeJCXByXC\nQqpAfgECgYEAwpzSfyot3PAlxm9iVK5Zc6lRdBq7Jazt7t91U6zeY7C4xzNG1Tuf\ncSYK3qRwl'
+             'Mw2uXl9auxyV41rziX9sZhtFUnm4jcGv9MHeaAaSSPSsvrtZDFBS7ky\nl2Ixk1078LTZCLMYmAKCAr2XLmShBPSVcuaL'
+             'kDRX4rvw7scWmMb86wECgYEA4/yC\nEAjXlL0WlOYDJ3J//Pg4iBtIedHXmn30goNuCBBaoYygXapeytEmU2q5hybQTMTX'
+             '\nVl/vIAFiu0TX81VQ7LDLJaber/7GEsIT3x+xm0jFvOxFYVhT5b0s0z1CQolnRFsA\ndIwQ5u5GkP65hyJUa3ZMh+L6Vi'
+             'sSCTKpAco9ZhkCgYAKFZ5CwKjHvhn3AmaSBMbV\n23xBC/GOrjtWGXY288pCWDH7AIk3G3PTpSkDCHpc+4gKbGU3WTFDoC'
+             'xp7kYLId7l\nL4MrTban0gOdJdK234hXhfEvM+4yQlKAzbHL9RTaEET+0mj/14FtKu3elZBSdWoZ\nHiE1Q8EaGqsNdHuT'
+             'RxxsAQKBgQCqw7enyveusPL5FE/IfDrhgArX55ehp0uWrEE4\ngLkp0RYRawOzJKlbwMy0LnxIfwoGdmnUiIbTsjYBjs8'
+             'xs/WpU8LVsOeba3lxE21O\n8q5VYWy61T4ia9ZrjgbFMl0u+TwgNwlgQolmb5Lrh9/vGAejdjhcj+ZyJFCeExQE\nAzd6'
+             'AQKBgBhe+FwMhTus961jqEKXBXm0/OaOgzOdgl/asuC8oLU7cAVD7sS2LFcU\nu7ofIVIG6cRWRruajIuCdlIcLOedTE4'
+             'YL5jAuRL1TyVvxMm0FsrkWPABFrHWhsZs\nTSzpiOFJkLJTVnT7hlW/+m0qrKiW0zrFza0JaFwP/lj+hRrYGkOl\n'
+             '-----END RSA PRIVATE KEY-----'
+    }


+class KEYS:
+    cookie_encryption_private_key = "#ilenskey@rock1#"


+class FILE_PATH:
+    CSV_PATH = "csv"
+    IMAGES = "images"
+    DOCS = "docs"
+    PDF = "pdf"
+    TXT = "txt"
scripts/core/handler/supportlens_handler.py → scripts/core/handler/category_configuration_handler.py · +78 −30

@@ -8,7 +8,7 @@ from scripts.utils.get_new_id import GetNewId
from scripts.utils.mongo_utility import MongoConnect


-class SupportLensHandler:
+class CategoryConfigurationHandler:
    def __init__(self):
        try:
            logger.debug("Inside the Support lens Handler module")
@@ -21,7 +21,8 @@ class SupportLensHandler:
    def get_category_list(self, input_json):
        final_json = {"status": StatusMessages.FAILED,
-                      "data": dict(headerContent=StaticJsons.CATEGORY_HEADERCONTENT, bodyContent=list())}
+                      "data": dict(headerContent=StaticJsons.CATEGORY_HEADERCONTENT, bodyContent=list()),
+                      "message": StatusMessages.CATEGORY_LIST}
        logger.debug("Inside get_category_list definition")
        try:
            records = list(self.mongo_obj.aggregate(
@@ -35,8 +36,8 @@ class SupportLensHandler:
                }, {
                    '$project': {
                        '_id': 0,
-                        'category_id': 'category_id',
-                        'category_name': 'CategoryName',
+                        'category_id': '$category_id',
+                        'category_name': '$CategoryName',
                        "sub_categories": '$SubCategories',
                        'description': '$Description'
                    }
@@ -44,10 +45,11 @@ class SupportLensHandler:
                ]))
            for each_record in records:
                if "sub_categories" in each_record:
-                    sub_categories = [item["sub_CategoryName"] for item in each_record["sub_categories"]]
+                    sub_categories = [item["subCategoryName"] for item in each_record["sub_categories"]]
                    each_record["sub_categories"] = ",".join(sub_categories)
            final_json["data"]["bodyContent"] = deepcopy(records)
            final_json["status"] = StatusMessages.SUCCESS
+            final_json["message"] = StatusMessages.SUCCESS
        except Exception as e:
            logger.debug("Exception occurred while fetching category list data" + str(e))
        return final_json
@@ -57,7 +59,7 @@ class SupportLensHandler:
                      "data": dict()}
        logger.debug("Inside fetch_category_data definition")
        try:
-            records = self.mongo_obj.aggregate(
+            record_data = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.category_configuration,
                list_for_aggregation=[
@@ -69,16 +71,23 @@ class SupportLensHandler:
                }, {
                    '$project': {
                        '_id': 0,
-                        'category_name': 'CategoryName',
+                        'category_name': '$CategoryName',
                        "sub_categories": '$SubCategories',
-                        'description': '$Description'
+                        'description': '$Description',
+                        'category_id': '$category_id',
+                        'project_id': '$project_id'
                    }
                }
-            ])
-            if len(records["sub_categories"]):
+            ]))
+            records = list()
+            if len(record_data):
+                records = record_data[0]
                for data in records["sub_categories"]:
-                    data['label'] = data.pop("sub_CategoryName")
+                    data['label'] = data.pop("subCategoryName")
                final_json["data"] = deepcopy(records)
+            sub_category_list = self.fetch_subcategory_data(project_id=input_json["project_id"])
+            if sub_category_list:
+                final_json["data"]["total_sub_categories"] = deepcopy(sub_category_list)
            final_json["status"] = StatusMessages.SUCCESS
        except Exception as e:
            logger.debug("Exception occurred while fetching category data" + str(e))
@@ -89,40 +98,53 @@ class SupportLensHandler:
        logger.debug("Inside save_category_data definition")
        try:
            for data in input_json["sub_categories"]:
-                data['sub_CategoryName'] = data.pop("label")
-                if data["sub_category_id"] in [str(), ""]:
-                    sub_category_id = self.save_subCategory_data(label_name=data["sub_CategoryName"],
+                data['subCategoryName'] = data.pop("label")
+                if "sub_category_id" not in data or data["sub_category_id"] in [str(), ""]:
+                    sub_category_id = self.save_subCategory_data(label_name=data["subCategoryName"],
                                                                  project_id=input_json["project_id"])
                    if not sub_category_id:
                        return final_json
                    data["sub_category_id"] = sub_category_id
+            user = self.new_id.get_user_id()
+            if "user_id" in user:
+                user = user["user_id"]
            if input_json["type"] == "save":
                insert_json = dict(CategoryName=input_json["category_name"], Description=input_json["description"],
                                   SubCategories=input_json["sub_categories"],
                                   category_id="category_" + self.new_id.get_next_id("category"),
                                   project_id=input_json["project_id"],
                                   created_on=time.time(),
-                                   created_by=self.new_id.get_user_id(),
+                                   created_by=user,
                                   last_updated_on=time.time(),
-                                   last_updated_by=self.new_id.get_user_id()
+                                   last_updated_by=user
                                   )
                self.mongo_obj.insert_one(database_name=DBMapping.support_lens_configuration,
                                          collection_name=DBMapping.category_configuration,
                                          json_data=insert_json)
            if input_json["type"] == "edit":
-                update_json = dict(CategoryName=input_json["category_name"], Description=input_json["description"],
-                                   SubCategories=input_json["sub_categories"],
-                                   category_id=input_json["category_id"],
-                                   created_on=time.time(),
-                                   project_id=input_json["project_id"],
-                                   created_by=self.new_id.get_user_id(),
-                                   last_updated_on=time.time(),
-                                   last_updated_by=self.new_id.get_user_id()
-                                   )
+                category_data = list(self.mongo_obj.aggregate(
+                    db_name=DBMapping.support_lens_configuration,
+                    collection_name=DBMapping.category_configuration,
+                    list_for_aggregation=[
+                        {
+                            '$match': {
+                                'category_id': input_json["category_id"]
+                            }
+                        }
+                    ]))
+                if len(category_data):
+                    update_json = category_data[0]
+                update_json.update(CategoryName=input_json["category_name"], Description=input_json["description"],
+                                   SubCategories=input_json["sub_categories"],
+                                   category_id=input_json["category_id"],
+                                   project_id=input_json["project_id"],
+                                   last_updated_on=time.time(),
+                                   last_updated_by=user
+                                   )
                query_json = dict(project_id=input_json["project_id"], category_id=input_json["category_id"])
                self.mongo_obj.update_one(db_name=DBMapping.support_lens_configuration,
                                          collection_name=DBMapping.category_configuration,
-                                          set_json=update_json, query=query_json, upsert=True)
+                                          set_json=update_json, query=query_json)
            final_json = dict(status=StatusMessages.SUCCESS, message="Category saved Successfully")
        except Exception as e:
@@ -131,7 +153,7 @@ class SupportLensHandler:
    def save_subCategory_data(self, label_name, project_id):
        try:
-            insert_json = dict(sub_CategoryName=label_name,
+            insert_json = dict(subCategoryName=label_name,
                               sub_category_id="sub_category_" + self.new_id.get_next_id("sub_category"),
                               project_id=project_id,
                               created_on=time.time(),
@@ -151,7 +173,33 @@ class SupportLensHandler:
            self.mongo_obj.delete_one_record(db_name=DBMapping.support_lens_configuration,
                                             collection_name=DBMapping.category_configuration,
                                             query_json=query_json)
+            # self.mongo_obj.delete_many_with_filter(db_name=DBMapping.support_lens_configuration,
+            #                                        collection_name=DBMapping.resolver_configuration,
+            #                                        filter_query=query_json)
            final_json = dict(status=StatusMessages.SUCCESS, message="Category deleted Successfully")
        except Exception as e:
            logger.debug("Exception occurred while deleting category data" + str(e))
        return final_json

+    def fetch_subcategory_data(self, project_id):
+        try:
+            records = list(self.mongo_obj.aggregate(
+                db_name=DBMapping.support_lens_configuration,
+                collection_name=DBMapping.subcategory_configuration,
+                list_for_aggregation=[
+                    {
+                        '$match': {
+                            'project_id': project_id
+                        }
+                    }, {
+                        '$project': {
+                            '_id': 0,
+                            'value': '$sub_category_id',
+                            'label': '$subCategoryName'
+                        }
+                    }
+                ]))
+            return records
+        except Exception as e:
+            logger.debug("Exception while fetching sub category details:" + str(e))
+            return False
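One of the substantive fixes in the hunks above is the added `$` prefix inside the `$project` stages: in a MongoDB aggregation expression a bare string is treated as a literal value, while a `$`-prefixed string is a reference to a document field. A minimal illustration of the difference, written as plain pipeline dictionaries independent of the MongoConnect wrapper:

# Without "$": every output document gets the constant string "CategoryName".
pipeline_literal = [{"$project": {"_id": 0, "category_name": "CategoryName"}}]

# With "$": category_name carries whatever value the CategoryName field holds.
pipeline_field = [{"$project": {"_id": 0, "category_name": "$CategoryName"}}]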
scripts/core/handler/resolver_setup_handler.py · new file (0 → 100644) · +211 −0

import time
from copy import deepcopy

from scripts.config.app_constants import StatusMessages, StaticJsons, DBMapping
from scripts.config.db_connection_obj import ConnectionObj
from scripts.logging.logger import logger
from scripts.utils.get_new_id import GetNewId
from scripts.utils.mongo_utility import MongoConnect


class ResolverSetupHandler:
    def __init__(self):
        try:
            logger.debug("Inside the Resolver setup module")
            self.new_id = GetNewId()
            self.mongo_obj = ConnectionObj.mongo_connection_obj
            if not ConnectionObj.mongo_connection_obj:
                self.mongo_obj = ConnectionObj.mongo_connection_obj = MongoConnect()
        except Exception as e:
            logger.exception("Exception in the data utility definition" + str(e))

    def get_resolver_list(self, input_json):
        final_json = {"status": StatusMessages.FAILED,
                      "data": dict(headerContent=StaticJsons.RESOLEVER_HEADERCONTENT, bodyContent=list())}
        logger.debug("Inside get_category_list definition")
        try:
            records = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.resolver_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'project_id': input_json["project_id"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            'resolver_group_id': '$resolver_group_id',
                            'resolver_group_name': '$ResolverGroupname',
                            "sub_categories": '$SubCategories',
                            "category": "$CategoryName",
                            'description': '$Description'
                        }
                    }
                ]))
            for each_record in records:
                if "sub_categories" in each_record:
                    sub_categories = [item["sub_CategoryName"] for item in each_record["sub_categories"]]
                    each_record["sub_categories"] = ",".join(sub_categories)
            final_json["data"]["bodyContent"] = deepcopy(records)
            final_json["status"] = StatusMessages.SUCCESS
        except Exception as e:
            logger.debug("Exception occurred while fetching resolver list data" + str(e))
        return final_json

    def save_resolver_data(self, input_json):
        final_json = dict(status=StatusMessages.FAILED, message=StatusMessages.RESOLVER_DATA)
        logger.debug("Inside save_resolver_data definition")
        try:
            for data in input_json["sub_categories"]:
                data['sub_CategoryName'] = data.pop("label")
            category_data = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.category_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'category_id': input_json["category"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            "category": "$CategoryName"
                        }
                    }
                ]))
            if len(category_data):
                category_data = category_data[0]
            user = self.new_id.get_user_id()
            if "user_id" in user:
                user = user["user_id"]
            if input_json["type"] == "save":
                insert_json = dict(ResolverGroupname=input_json["resolver_group_name"],
                                   Description=input_json["description"],
                                   SubCategories=input_json["sub_categories"],
                                   resolver_group_id="resolver_group_" + self.new_id.get_next_id("resolver_group"),
                                   category_id=input_json["category"],
                                   CategoryName=category_data.get("category", None),
                                   project_id=input_json["project_id"],
                                   created_on=time.time(),
                                   created_by=user,
                                   last_updated_on=time.time(),
                                   last_updated_by=user)
                self.mongo_obj.insert_one(database_name=DBMapping.support_lens_configuration,
                                          collection_name=DBMapping.resolver_configuration,
                                          json_data=insert_json)
            if input_json["type"] == "edit":
                resolver_data = list(self.mongo_obj.aggregate(
                    db_name=DBMapping.support_lens_configuration,
                    collection_name=DBMapping.resolver_configuration,
                    list_for_aggregation=[
                        {
                            '$match': {
                                'resolver_group_id': input_json["resolver_group_id"]
                            }
                        }]))
                for record in resolver_data:
                    record.update(CategoryName=category_data.get("category", None),
                                  Description=input_json["description"],
                                  SubCategories=input_json["sub_categories"],
                                  category_id=input_json["category"],
                                  project_id=input_json["project_id"],
                                  last_updated_on=time.time(),
                                  last_updated_by=user)
                    query_json = dict(project_id=input_json["project_id"],
                                      resolver_group_id=input_json["resolver_group_id"])
                    self.mongo_obj.update_one(db_name=DBMapping.support_lens_configuration,
                                              collection_name=DBMapping.resolver_configuration,
                                              set_json=record, query=query_json)
            final_json = dict(status=StatusMessages.SUCCESS, message="Category saved Successfully")
        except Exception as e:
            logger.debug("Exception occurred while saving category data" + str(e))
        return final_json

    def fetch_resolver_data(self, input_json):
        final_json = {"status": StatusMessages.FAILED, "data": dict()}
        logger.debug("Inside fetch_category_data definition")
        try:
            records = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.resolver_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'project_id': input_json["project_id"],
                            "resolver_group_id": input_json["resolver_group_id"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            "sub_categories": '$SubCategories',
                            'description': '$Description',
                            'resolver_group_name': '$ResolverGroupname',
                            'category': '$category_id',
                            "resolver_group_id": "$resolver_group_id"
                        }
                    }
                ]))
            if len(records):
                records = records[0]
                for data in records["sub_categories"]:
                    data['label'] = data.pop("sub_CategoryName")
                final_json["data"] = deepcopy(records)
            final_json["status"] = StatusMessages.SUCCESS
        except Exception as e:
            logger.debug("Exception occurred while fetching reslover data" + str(e))
        return final_json

    def delete_resolver_data(self, input_json):
        final_json = dict(status=StatusMessages.FAILED, message=StatusMessages.RESOLVER_DELETE)
        try:
            query_json = dict(project_id=input_json["project_id"],
                              resolver_group_id=input_json["resolver_group_id"])
            self.mongo_obj.delete_one_record(db_name=DBMapping.support_lens_configuration,
                                             collection_name=DBMapping.resolver_configuration,
                                             query_json=query_json)
            final_json = dict(status=StatusMessages.SUCCESS, message="Resolved deleted Successfully")
        except Exception as e:
            logger.debug("Exception occurred while deleting Resolver data" + str(e))
        return final_json

    def get_category_metadata(self, input_json):
        final_json = {"status": StatusMessages.FAILED, "data": dict(), "message": StatusMessages.CATEGORY_FETCH}
        logger.debug("Inside get category metadata definition")
        try:
            category_data = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.category_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'project_id': input_json["project_id"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            "label": "$CategoryName",
                            "value": "$category_id",
                            "sub_categories": "$SubCategories"
                        }
                    }
                ]))
            category_list = list()
            sub_category_list = dict()
            for category in category_data:
                category_list.append(dict(label=category["label"], value=category["value"]))
                if category["value"] not in sub_category_list:
                    sub_category_list.update({category["value"]: list()})
                for data in category["sub_categories"]:
                    data['label'] = data.pop("subCategoryName")
                sub_category_list[category["value"]] = category["sub_categories"]
            final_json.update(status=StatusMessages.SUCCESS, message=StatusMessages.SUCCESS,
                              data=dict(categories=deepcopy(category_list),
                                        sub_categories=deepcopy(sub_category_list)))
        except Exception as e:
            logger.debug("Exception occurred while deleting Resolver data" + str(e))
        return final_json
scripts/core/handler/supportcase_setup_handler.py · new file (0 → 100644) · +306 −0

import base64
import os
import time
from copy import deepcopy
from datetime import datetime

from scripts.config import app_configuration
from scripts.config.app_constants import DBMapping, CaseStatus, StatusMessages, FILE_PATH, StaticJsons
from scripts.config.db_connection_obj import ConnectionObj
from scripts.logging.logger import logger
from scripts.utils.get_new_id import GetNewId
from scripts.utils.mongo_utility import MongoConnect


class TicketSetupHandler:
    def __init__(self):
        try:
            logger.debug("Inside the Resolver setup module")
            self.new_id = GetNewId()
            self.mongo_obj = ConnectionObj.mongo_connection_obj
            if not ConnectionObj.mongo_connection_obj:
                self.mongo_obj = ConnectionObj.mongo_connection_obj = MongoConnect()
        except Exception as e:
            logger.exception("Exception in the data utility definition" + str(e))

    def save_ticket_data(self, input_json):
        final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.FAILED}
        try:
            logger.debug("Inside save ticket data definition")
            category_data = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.category_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'category_id': input_json["category"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            "CategoryName": "$CategoryName"
                        }
                    }
                ]))
            project_data = self.mongo_obj.find_one(db_name=DBMapping.ilens_configuration,
                                                   collection_name=DBMapping.customer_projects,
                                                   query={"customer_project_id": input_json["project_id"]})
            subcategory_data = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.subcategory_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'sub_category_id': input_json["subCategory"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            "subCategoryName": "$subCategoryName"
                        }
                    }
                ]))
            site_data = list(self.mongo_obj.aggregate(
                db_name=DBMapping.ilens_configuration,
                collection_name=DBMapping.site_conf,
                list_for_aggregation=[
                    {
                        '$match': {
                            'site_id': input_json["site"]
                        }
                    }, {
                        '$project': {
                            '_id': 0
                        }
                    }
                ]))
            # user_data = list(self.mongo_obj.aggregate(
            #     db_name=DBMapping.ilens_configuration,
            #     collection_name=DBMapping.user,
            #     list_for_aggregation=[
            #         {
            #             '$match': {
            #                 'user_id': {"$in": input_json["contacts"]}
            #             }
            #         }, {
            #             '$project': {
            #                 '_id': 0
            #             }
            #         }
            #     ]))
            if len(category_data):
                category_data = category_data[0]
            else:
                final_json = dict(message="Invalid category selected", status=StatusMessages.FAILED)
                return final_json
            if len(subcategory_data):
                subcategory_data = subcategory_data[0]
            else:
                final_json = dict(message="Invalid subcategory selected", status=StatusMessages.FAILED)
                return final_json
            if len(site_data):
                site_data = site_data[0]
            else:
                final_json = dict(message="Invalid Site selected", status=StatusMessages.FAILED)
                return final_json
            user = self.new_id.get_user_id()
            if "user_id" in user:
                user_data = self.mongo_obj.find_one(db_name=DBMapping.ilens_configuration,
                                                    collection_name=DBMapping.user,
                                                    query={"user_id": user["user_id"]})
                user = dict(userName=user_data["username"], user_id=user)
            if input_json["type"] == "create":
                case_id = "case_" + self.new_id.get_next_id("case")
            else:
                case_id = input_json["case_id"]
            if "deletedList" in input_json and len(input_json["deletedList"]):
                for file in input_json["deletedList"]:
                    response = self.delete_file_details(file, input_json["case_id"])
            if len(input_json["fileNameBlock"]):
                for file_data in input_json["fileNameBlock"]:
                    if "attachment" not in file_data:
                        continue
                    response = self.save_file_details(file_data, case_id)
                    del file_data["attachment"]
            if input_json["type"].lower() == "create":
                insert_json = dict(CategoryName=category_data["CategoryName"],
                                   ProjectName=project_data["customer_project_name"],
                                   category_id=input_json["category"],
                                   subCategoryName=subcategory_data["subCategoryName"],
                                   sub_category_id=input_json["subCategory"],
                                   Description=input_json["description"],
                                   subject=input_json["subject"],
                                   site_id=input_json["site"],
                                   siteName=site_data["site_name"],
                                   case_id=case_id,
                                   project_id=input_json["project_id"],
                                   case_status=CaseStatus.OPEN,
                                   created_on=time.time(),
                                   created_by=user,
                                   file_details=input_json["fileNameBlock"],
                                   last_updated_on=time.time(),
                                   last_updated_by=user,
                                   user_details=input_json["contacts"]
                                   )
                self.mongo_obj.insert_one(database_name=DBMapping.support_lens_configuration,
                                          collection_name=DBMapping.ticket_configuration,
                                          json_data=insert_json)
            if input_json["type"].lower() == "edit":
                query_json = {"case_id": input_json["case_id"]}
                case_data = self.mongo_obj.find_one(db_name=DBMapping.support_lens_configuration,
                                                    collection_name=DBMapping.ticket_configuration,
                                                    query=query_json, search_json={"_id": 0})
                if len(case_data):
                    case_data.update(CategoryName=category_data["CategoryName"],
                                     category_id=input_json["category"],
                                     subCategoryName=subcategory_data["subCategoryName"],
                                     sub_category_id=input_json["subCategory"],
                                     Description=input_json["description"],
                                     subject=input_json["subject"],
                                     site_id=input_json["site"],
                                     siteName=site_data["site_name"],
                                     project_id=input_json["project_id"],
                                     case_status=CaseStatus.OPEN,
                                     file_details=input_json["fileNameBlock"],
                                     last_updated_on=time.time(),
                                     last_updated_by=user,
                                     user_details=input_json["contacts"]
                                     )
                    self.mongo_obj.update_one(db_name=DBMapping.support_lens_configuration,
                                              collection_name=DBMapping.ticket_configuration,
                                              set_json=case_data, query=query_json)
            final_json = dict(status=StatusMessages.SUCCESS, message="Casedata saved successfully")
        except Exception as e:
            logger.exception("Exception while saving ticket data" + str(e))
        return final_json

    def save_file_details(self, input_json, case_id):
        try:
            file_save_path = self.check_file_extenstions(input_json["name"], case_id)
            if not file_save_path:
                return False
            decoded_data = base64.b64decode(str(input_json["attachment"]))
            if not os.path.exists(file_save_path):
                os.makedirs(file_save_path)
            with open(f"{file_save_path}/{input_json['name']}", "wb") as file:
                file.write(decoded_data)
                file.close()
            return True
        except Exception as e:
            logger.exception("Exception while saving ticket data" + str(e))
            return False

    def delete_file_details(self, file_name, case_id):
        try:
            file_delete_path = self.check_file_extenstions(file_name, case_id)
            if not file_delete_path:
                return False
            if os.path.exists(f"{file_delete_path}\{file_name}"):
                os.remove(f"{file_delete_path}\{file_name}")
            return True
        except Exception as e:
            logger.exception("Exception while saving ticket data" + str(e))
            return False

    def check_file_extenstions(self, file_name, case_id):
        file_extenstion = file_name.split(".")[-1].lower()
        if file_extenstion in ["csv"]:
            file_save_path = os.path.join(app_configuration.FILES_SAVE_PATH, FILE_PATH.CSV_PATH, case_id)
        elif file_extenstion in ["jpg", "jpeg", "png", "svg"]:
            file_save_path = os.path.join(app_configuration.FILES_SAVE_PATH, FILE_PATH.IMAGES, case_id)
        elif file_extenstion in ["txt"]:
            file_save_path = os.path.join(app_configuration.FILES_SAVE_PATH, FILE_PATH.TXT, case_id)
        elif file_extenstion in ["pdf"]:
            file_save_path = os.path.join(app_configuration.FILES_SAVE_PATH, FILE_PATH.PDF, case_id)
        elif file_extenstion in ["docs", "docx", "doc"]:
            file_save_path = os.path.join(app_configuration.FILES_SAVE_PATH, FILE_PATH.DOCS, case_id)
        else:
            return False
        return file_save_path

    def fetch_ticket_details(self, input_json):
        final_json = dict(status=StatusMessages.FAILED, message=StatusMessages.FAILED, data=list())
        try:
            response_data = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.ticket_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'case_id': input_json["case_id"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            'category': "$category_id",
                            'subCategory': '$sub_category_id',
                            'site': '$site_id',
                            'subject': "$subject",
                            'fileNameBlock': '$file_details',
                            'description': '$Description',
                            'contacts': '$user_details',
                        }
                    }
                ]))
            if len(response_data):
                response_data = response_data[0]
            final_json = dict(status=StatusMessages.SUCCESS, message=StatusMessages.SUCCESS,
                              data=deepcopy(response_data))
        except Exception as e:
            logger.exception("Exception while fetching case details" + str(e))
        return final_json

    def delete_case_details(self, input_json):
        try:
            if "case_id" in input_json and input_json["case_id"] != "":
                query = {"case_id": input_json["case_id"]}
                new_values = {"$set": {"case_status": CaseStatus.DELETE}}
                ConnectionObj.mongo_connection_obj.update_one(db_name=DBMapping.support_lens_configuration,
                                                              collection_name=DBMapping.ticket_configuration,
                                                              query=query, set_json=new_values)
            return {"status": "success", "message": "Case deleted successfully"}
        except Exception as e:
            logger.exception(str(e))
            return {"status": "failed", "message": str(e)}

    def get_support_case_table_details(self, input_json):
        try:
            final_json = dict(status=StatusMessages.FAILED, message=StatusMessages.FAILED,
                              data=dict(tableData=dict(headerContent=StaticJsons.SUPPORTLENS_FETCHTABLE_HEADERCONTENT),
                                        tableActions=StaticJsons.SUPPORTLENS_FETCHTABLE_TABLEACTIONS,
                                        enableRowExpand=True,
                                        table_type="infinite_scroll",
                                        hideSearch=True,
                                        server_search=True))
            records = list(self.mongo_obj.aggregate(
                db_name=DBMapping.support_lens_configuration,
                collection_name=DBMapping.ticket_configuration,
                list_for_aggregation=[
                    {
                        '$match': {
                            'project_id': input_json["project_id"]
                        }
                    }, {
                        '$project': {
                            '_id': 0,
                            'case_number': '$case_id',
                            'status': '$case_status',
                            "subject": '$subject',
                            "customer_project_id": "$ProjectName",
                            'last_updated': '$last_updated_on',
                            'created_by': '$created_by'
                        }
                    }
                ]))
            for record in records:
                record["created_by"] = record["created_by"]["userName"]
                record["last_updated"] = datetime.fromtimestamp(record["last_updated"]).strftime("%d %b %Y, %H:%M")
                record["expandData"] = [{"label": str(key).replace("_", " ").replace("-", " ").title(),
                                         "value": value} for key, value in record.items()]
            final_json.update(status=StatusMessages.SUCCESS, message=StatusMessages.SUCCESS)
            final_json["data"]["tableData"].update(bodyContent=deepcopy(records))
            final_json["data"]["total_no"] = len(records)
        except Exception as e:
            logger.exception(str(e))
        return final_json
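save_file_details above expects each fileNameBlock entry to carry a base64-encoded attachment alongside its file name. A rough sketch of how a caller could build such an entry; the name/attachment keys follow the handler code, while the helper name and sample file are illustrative:

import base64

def build_file_block(path):
    """Build one fileNameBlock entry of the shape save_file_details() reads."""
    with open(path, "rb") as fh:
        encoded = base64.b64encode(fh.read()).decode("ascii")
    return {"name": path.split("/")[-1], "attachment": encoded}

# Example: a .txt attachment ends up under <FILES_SAVE_PATH>/txt/<case_id>/
file_block = build_file_block("notes.txt")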
scripts/core/services/supportlens_category_configuration.py → scripts/core/services/category_configuration.py · +9 −9

from flask import Blueprint, request
from scripts.config.app_constants import Endpoints, StatusMessages
-from scripts.core.handler.supportlens_handler import SupportLensHandler
+from scripts.core.handler.category_configuration_handler import CategoryConfigurationHandler
from scripts.logging.logger import logger
from scripts.utils.AESEnc import apply_encryption

-support_lens_blueprint = Blueprint("support_lens_blueprint", __name__)
+category_configuration = Blueprint("support_lens_blueprint", __name__)
-handler_obj = SupportLensHandler()
+handler_obj = CategoryConfigurationHandler()


-@support_lens_blueprint.route(Endpoints.category_list, methods=['POST'])
+@category_configuration.route(Endpoints.list_category, methods=['POST'])
@apply_encryption
def get_category_list():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_LIST}
...
@@ -22,7 +22,7 @@ def get_category_list():
    return final_json


-@support_lens_blueprint.route(Endpoints.category_save, methods=['POST'])
+@category_configuration.route(Endpoints.save_category, methods=['POST'])
@apply_encryption
def save_category_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_DATA}
...
@@ -36,7 +36,7 @@ def save_category_data():
    return final_json


-@support_lens_blueprint.route(Endpoints.category_edit, methods=['POST'])
+@category_configuration.route(Endpoints.edit_category, methods=['POST'])
@apply_encryption
def edit_category_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_DATA}
...
@@ -50,7 +50,7 @@ def edit_category_data():
    return final_json


-@support_lens_blueprint.route(Endpoints.category_fetch, methods=['POST'])
+@category_configuration.route(Endpoints.fetch_category, methods=['POST'])
@apply_encryption
def fetch_category_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_FETCH}
...
@@ -63,9 +63,9 @@ def fetch_category_data():
    return final_json


-@support_lens_blueprint.route(Endpoints.category_fetch, methods=['POST'])
+@category_configuration.route(Endpoints.delete_category, methods=['POST'])
@apply_encryption
-def fetch_category_data():
+def delete_category_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_DATA}
    try:
        input_data = request.data
...
scripts/core/services/resolver_setup_configuration.py · new file (0 → 100644) · +89 −0

from flask import Blueprint, request

from scripts.config.app_constants import Endpoints, StatusMessages
from scripts.core.handler.resolver_setup_handler import ResolverSetupHandler
from scripts.logging.logger import logger
from scripts.utils.AESEnc import apply_encryption

resolver_configuration = Blueprint("resolver_blueprint", __name__)
handler_obj = ResolverSetupHandler()


@resolver_configuration.route(Endpoints.list_resolver, methods=['POST'])
@apply_encryption
def get_resolver_list():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.RESOLVER_LIST}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.get_resolver_list(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@resolver_configuration.route(Endpoints.save_resolver, methods=['POST'])
@apply_encryption
def save_resolver_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.RESOLVER_DATA}
    try:
        input_data = request.data
        # input_data = request.get_json()
        input_data.update(type="save")
        final_json = handler_obj.save_resolver_data(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@resolver_configuration.route(Endpoints.edit_resolver, methods=['POST'])
@apply_encryption
def edit_resolver_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.RESOLVER_DATA}
    try:
        input_data = request.data
        # input_data = request.get_json()
        input_data.update(type="edit")
        final_json = handler_obj.save_resolver_data(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@resolver_configuration.route(Endpoints.fetch_resolver_data, methods=['POST'])
@apply_encryption
def fetch_resolver_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_FETCH}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.fetch_resolver_data(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@resolver_configuration.route(Endpoints.fetch_resolver_meta, methods=['POST'])
@apply_encryption
def get_resolver_metadata():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.RESOLVER_FETCH}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.get_category_metadata(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@resolver_configuration.route(Endpoints.delete_resolver, methods=['POST'])
@apply_encryption
def delete_resolve_data():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.CATEGORY_DATA}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.delete_resolver_data(input_json=input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json
scripts/core/services/supportcase_setup_configuration.py · new file (0 → 100644) · +62 −0

from flask import Blueprint, request

from scripts.config.app_constants import Endpoints, StatusMessages
from scripts.core.handler.supportcase_setup_handler import TicketSetupHandler
from scripts.logging.logger import logger
from scripts.utils.AESEnc import apply_encryption

supportcase_setup_configuration = Blueprint("ticket_setup_blueprint", __name__)
handler_obj = TicketSetupHandler()


@supportcase_setup_configuration.route(Endpoints.save_support_case, methods=['POST'])
@apply_encryption
def save_support_case_details():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.SUPPORTCASE_SAVE}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.save_ticket_data(input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@supportcase_setup_configuration.route(Endpoints.fetch_support_case, methods=['POST'])
@apply_encryption
def fetch_support_case_details():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.SUPPORTCASE_SAVE}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.fetch_ticket_details(input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@supportcase_setup_configuration.route(Endpoints.delete_support_case, methods=['POST'])
@apply_encryption
def delete_support_case_details():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.SUPPORTCASE_SAVE}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.delete_case_details(input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json


@supportcase_setup_configuration.route(Endpoints.get_support_case_table_details, methods=['POST'])
@apply_encryption
def get_support_case_table_details():
    final_json = {"status": StatusMessages.FAILED, "message": StatusMessages.SUPPORTCASE_SAVE}
    try:
        input_data = request.data
        # input_data = request.get_json()
        final_json = handler_obj.get_support_case_table_details(input_data)
    except Exception as e:
        logger.exception("Exception -> %s" % str(e))
    return final_json
\ No newline at end of file
scripts/utils/get_new_id.py · +1 −1

@@ -7,7 +7,7 @@ from scripts.config.db_connection_obj import ConnectionObj
if ConnectionObj.mongo_connection_obj is None:
    ConnectionObj.mongo_connection_obj = MongoConnect()
my_col = app_constants.DBMapping.unique_id
-metadata = app_constants.DBMapping.mongo_db_name
+metadata = app_constants.DBMapping.support_lens_configuration


class GetNewId:
...
scripts/utils/mongo_utility.py · +6 −0

@@ -1096,6 +1096,12 @@ class MongoConnect(MongoDataEncryption):
        try:
            docid = self.__mongo_OBJ__[db_name][collection_name]
            mg_response = docid.aggregate(list_for_aggregation)
+            # mongo_response = self.fetch_records_from_object(body=mg_response,
+            #                                                 _collection_name=collection_name)
+            # if not len(mongo_response) or len(mongo_response) > 1:
+            #     return mongo_response
+            # return mongo_response[0]
+            return mg_response
        except Exception as e:
            logger.error(f"{MONGO006}: {str(e)}")
        return mg_response
...
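With this change the aggregate helper returns the raw cursor from the underlying aggregate call instead of a post-processed result, which is why every caller in the new handlers wraps it in list(...) before indexing: a cursor can only be iterated once. A tiny illustration of that calling pattern against a plain pymongo collection (the connection URI is an assumption; the database and collection names match DBMapping):

from pymongo import MongoClient

collection = MongoClient("mongodb://localhost:27017")["supportlens_configuration"]["category_configuration"]

cursor = collection.aggregate([{"$match": {"project_id": "project_001"}}])
records = list(cursor)                     # materialise once; the cursor is exhausted afterwards
first = records[0] if records else None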