Commit 8688eaf7 authored by tarun2512's avatar tarun2512

message

parents
MONGO_URI=mongodb://ilens:ilens4321@192.168.0.220:31589/?authSource=admin&directConnection=true
REDIS_URI=redis://admin:iLensDevRedis@192.168.0.220:32642
PASSWORD=Admin@090
\ No newline at end of file
-----BEGIN RSA PRIVATE KEY-----
MIICWwIBAAKBgQClilTaeHq6Zc+kWHCNl1O0btGRm7ct3O5zqWx1mwwLUWH14eft
Hi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULfENhwd/D7P3mnoRlktPT2t+tt
RRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw2hcqOYe/NGTkmm1PswIDAQAB
AoGAZPARR1l5NBkKYGKQ1rU0E+wSmx+AtVVmjF39RUSyNmB8Q+poebwSgsr58IKt
T6Yq6Tjyl0UAZTGmferCK0xJJrqyP0hMn4nNNut+acWMKyt+9YrA2FO+r5Jb9JuT
SK35xXnM4aZLGppgWJxRzctpIz+qkf6oLRSZme0AuiqcwYECQQDY+QDL3wbWplRW
bze0DsZRMkDAkNY5OCydvjte4SR/mmAzsrpNrS5NztWbaaQrefoPbsdYBPbd8rS7
C/s/0L1zAkEAw1EC5zt2STuhkcKLa/tL+bk8WHHHtf19aC9kBj1TvWBFh+JojWCo
86iK5fLcHzhyQx5Qi3E9LG2HvOWhS1iUwQJAKbEHHyWW2c4SLJ2oVXf1UYrXeGkc
UNhjclgobl3StpZCYAy60cwyNo9E6l0NR7FjhG2j7lzd1t4ZLkvqFmQU0wJATLPe
yQIwBLh3Te+xoxlQD+Tvzuf3/v9qpWSfClhBL4jEJYYDeynvj6iry3whd91J+hPI
m8o/tNfay5L+UcGawQJAAtbqQc7qidFq+KQYLnv5gPRYlX/vNM+sWstUAqvWdMze
JYUoTHKgiXnSZ4mizI6/ovsBOMJTb6o1OJCKQtYylw==
-----END RSA PRIVATE KEY-----
-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQClilTaeHq6Zc+kWHCNl1O0btGR
m7ct3O5zqWx1mwwLUWH14eftHi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULf
ENhwd/D7P3mnoRlktPT2t+ttRRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw
2hcqOYe/NGTkmm1PswIDAQAB
-----END PUBLIC KEY-----
-----BEGIN RSA PRIVATE KEY-----
MIICWwIBAAKBgQClilTaeHq6Zc+kWHCNl1O0btGRm7ct3O5zqWx1mwwLUWH14eft
Hi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULfENhwd/D7P3mnoRlktPT2t+tt
RRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw2hcqOYe/NGTkmm1PswIDAQAB
AoGAZPARR1l5NBkKYGKQ1rU0E+wSmx+AtVVmjF39RUSyNmB8Q+poebwSgsr58IKt
T6Yq6Tjyl0UAZTGmferCK0xJJrqyP0hMn4nNNut+acWMKyt+9YrA2FO+r5Jb9JuT
SK35xXnM4aZLGppgWJxRzctpIz+qkf6oLRSZme0AuiqcwYECQQDY+QDL3wbWplRW
bze0DsZRMkDAkNY5OCydvjte4SR/mmAzsrpNrS5NztWbaaQrefoPbsdYBPbd8rS7
C/s/0L1zAkEAw1EC5zt2STuhkcKLa/tL+bk8WHHHtf19aC9kBj1TvWBFh+JojWCo
86iK5fLcHzhyQx5Qi3E9LG2HvOWhS1iUwQJAKbEHHyWW2c4SLJ2oVXf1UYrXeGkc
UNhjclgobl3StpZCYAy60cwyNo9E6l0NR7FjhG2j7lzd1t4ZLkvqFmQU0wJATLPe
yQIwBLh3Te+xoxlQD+Tvzuf3/v9qpWSfClhBL4jEJYYDeynvj6iry3whd91J+hPI
m8o/tNfay5L+UcGawQJAAtbqQc7qidFq+KQYLnv5gPRYlX/vNM+sWstUAqvWdMze
JYUoTHKgiXnSZ4mizI6/ovsBOMJTb6o1OJCKQtYylw==
-----END RSA PRIVATE KEY-----
-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQClilTaeHq6Zc+kWHCNl1O0btGR
m7ct3O5zqWx1mwwLUWH14eftHi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULf
ENhwd/D7P3mnoRlktPT2t+ttRRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw
2hcqOYe/NGTkmm1PswIDAQAB
-----END PUBLIC KEY-----
from dotenv import load_dotenv
import os
load_dotenv()
from scripts.default_workspace_creation import WorkspaceCreation
from scripts.default_user_role_creation import UserRoleCreation
from scripts.default_user_creation import UserCreation
# Default-user password, supplied via the environment (.env loaded above).
PASSWORD = os.environ.get("PASSWORD")


def _bootstrap_defaults(password):
    """Seed the central workspace, the default roles, then the default user."""
    WorkspaceCreation().global_catalog_workspace_creation()
    UserRoleCreation().create_user_role()
    UserCreation().create_default_user(password)


if __name__ == "__main__":
    _bootstrap_defaults(PASSWORD)
bcrypt~=4.0.1
cryptography==43.0.1
pydantic~=2.7.3
python-dotenv==1.0.1
ut-mongo-util[stable,encryption]==1.1.1
ut-redis-connector[stable]==0.3.1
\ No newline at end of file
import os
import pathlib
import shutil
from typing import Optional, Any
from dotenv import load_dotenv
from pydantic.v1 import Field, root_validator, BaseSettings
load_dotenv()
class _LoggVariables(BaseSettings):
    """Logging settings, read from environment variables (.env via load_dotenv)."""

    LOG_LEVEL: str = Field(default="DEBUG")  # root log level name
    ENABLE_FILE_LOG: Optional[Any] = Field(default=False)  # presumably toggles a file handler — confirm in scripts.logging
    ENABLE_CONSOLE_LOG: Optional[Any] = Field(default=True)  # presumably toggles a console handler — confirm in scripts.logging
    # Directory for log files; default assumes the /code container layout.
    LOGS_MODULE_PATH: Optional[pathlib.Path] = Field(default="/code/data/default_catalog_meta_logs")
class _Databases(BaseSettings):
    """Database connection settings, read from environment variables."""

    MONGO_URI: Optional[str]  # full MongoDB connection string
    REDIS_URI: Optional[str]  # full Redis connection string
    REDIS_SPACE_DB: int = Field(default=18)  # Redis DB index for per-space metadata
    REDIS_USER_ROLE_DB: Optional[int] = Field(default=21)  # Redis DB index for role permissions
class _Security(BaseSettings):
    """Encryption-related settings."""

    # JSON file declaring which collections/fields get field-level encryption.
    ENCRYPTION_CONSTANTS_FILE_PATH: str = "scripts/config/mongo_encryption_constants.json"
    USER_ENCRYPTION: bool = Field(default=True)  # toggle user-field encryption
class _KeyPath(BaseSettings):
    """
    RSA key-pair locations.

    If the key files are missing under KEYS_PATH, the bundled pair from
    assets/keys is copied in, and PUBLIC/PRIVATE are resolved to the copies.
    """

    KEYS_PATH: Optional[pathlib.Path] = Field(default="data/keys")
    PUBLIC: Optional[pathlib.Path]
    PRIVATE: Optional[pathlib.Path]

    @root_validator(allow_reuse=True)
    def assign_values(cls, values):
        """Ensure both key files exist (seeding from assets/) and point PUBLIC/PRIVATE at them."""
        keys_path = values.get("KEYS_PATH")
        public_path = os.path.join(keys_path, "public")
        private_path = os.path.join(keys_path, "private")
        if not os.path.isfile(public_path) or not os.path.isfile(private_path):
            # exist_ok avoids a crash (and a TOCTOU race) when the directory
            # already exists but one of the key files is missing.
            os.makedirs(keys_path, exist_ok=True)
            shutil.copy(os.path.join("assets", "keys", "public"), public_path)
            shutil.copy(os.path.join("assets", "keys", "private"), private_path)
        values["PUBLIC"] = public_path
        values["PRIVATE"] = private_path
        return values
# Instantiate each settings group once at import time; values come from the
# process environment (plus .env loaded above).
DBConf = _Databases()
LoggVariables = _LoggVariables()
Security = _Security()
KeyPath = _KeyPath()

# Explicit public API of this config module.
__all__ = [
    "DBConf",
    "LoggVariables",
    "Security",
    "KeyPath"
]
\ No newline at end of file
from datetime import datetime, timezone
def _default_role(user_role_id, role_name):
    """
    Build one default role document for the central workspace.

    The three seed roles are identical except for their id and name, so they
    share this template.  A fresh dict is returned on every call, so the
    roles never alias each other's nested structures.

    :param user_role_id: unique role id (e.g. "user_role_096")
    :param role_name: display name, also used as the description
    :return: a role document ready for the ``user_role`` collection
    """
    return {
        "space_id": "space_099",
        "type": "edit",
        "user_role_id": user_role_id,
        "access_levels": {
            "userManagement": {"access_level": True},
            "approvalInbox": {"access_level": True},
            "artifacts": {"access_level": True},
            "assetExplorer": {
                "access_level": True,
                "children": {
                    "overview": False,
                    "parameters": False,
                    "alarms": False,
                    "events": False,
                    "serviceHistory": False,
                    "rules": False,
                    "resources": False,
                    "dataMapping": False,
                    "digitalTwin": False,
                    "materials": False,
                    "auditLogs": False,
                    "insights": False,
                    "treeView": False,
                    "parameterData": False,
                },
            },
        },
        "user_role_description": role_name,
        "user_role_name": role_name,
        "user_role_permissions": {
            "userManagement": {
                "key": "userManagement",
                "name": "User Management",
                "create": True,
                "delete": True,
                "edit": True,
                "view": True,
                "children": {
                    "users": {
                        "key": "users",
                        "name": "User",
                        "create": True,
                        "delete": True,
                        "edit": True,
                        "view": True,
                    }
                },
            },
            "approvalInbox": {
                "key": "approvalInbox",
                "name": "Approval Inbox",
                "create": True,
                "delete": True,
                "edit": True,
                "view": True,
            },
            "artifacts": {
                "key": "artifacts",
                "name": "artifacts",
                "create": True,
                "delete": True,
                "edit": True,
                "view": True,
            },
        },
        "catalogPermission": True,
    }


# Seed roles for the central workspace.  The previous hand-expanded literals
# differed only in id and name; generating them removes ~140 duplicated lines.
DEFAULT_USER_ROLES = [
    _default_role("user_role_096", "Admin"),
    _default_role("user_role_097", "Reviewer"),
    _default_role("user_role_098", "Operator"),
]
# Seed document for the default catalog user.  The password is filled in at
# runtime (bcrypt hash) before insertion — see UserCreation.create_default_user.
DEFAULT_USER = {
    "encryption_salt": {"dt_0": [], "dt_1": []},
    "name": "CatalogUser",
    "username": "cataloguser",
    "password": "",  # replaced with the bcrypt hash at creation time
    # NOTE(review): hard-coded contact details — presumably placeholders; confirm.
    "email": "tarun.madamanchi@rockwellautomation.com",
    "user_type": "catalog_user",
    "phonenumber": 9581388594,
    "userrole": ["user_role_096"],  # the "Admin" seed role (see DEFAULT_USER_ROLES)
    "user_id": "user_097",
    "created_by": "user_097",
    "product_encrypted": False,
    "failed_attempts": 0,
    "is_user_locked": False,
    "last_failed_attempt": "2021-05-13 08:56:15",
    "ilens_encrypted": False,
    "passwordReset": None,
    "tz": None,
    "expires_on": "02/12/21 09:00 30 AM",
    "disable_user": False,
    "last_logged_in": 1735796769,
    "last_failed_login": None,
    "fixed_delay": 0,
    "variable_delay": 0,
    "space_id": "space_099",
    "default_user": True,
}
# Seed user_space membership record linking the default user to the central
# workspace.  `updated_time` is a UTC ISO-8601 timestamp with a "Z" suffix.
DEFAULT_SPACE = {
    "space_id": "space_099",
    "userrole": ["user_role_096"],
    "created_by": "user_097",
    # datetime.utcnow() is deprecated (Python 3.12): use an aware UTC datetime
    # and normalise its "+00:00" offset to the original "Z" suffix.
    "updated_time": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
    "user_id": "user_097",
    "updated_by": "user_097",
}
\ No newline at end of file
{
"encrypt_collection_dict" : {
"user": {
"encrypt_keys": ["phonenumber", "email"],
"exclude_encryption": []}
}
}
from ut_redis_connector import RedisConnector
from scripts.config import DBConf
# Module-level Redis connections, created once at import time and shared
# across the scripts package.
connector = RedisConnector(DBConf.REDIS_URI)
# DB holding per-space metadata (e.g. source_meta prefix flags).
# REDIS_SPACE_DB is declared int in config, so the previous int() cast was
# redundant and inconsistent with the line below.
space_db = connector.connect(db=DBConf.REDIS_SPACE_DB, decode_responses=True)
# DB holding per-role permission hashes keyed "<space_id>__<user_role_id>".
user_role_permissions_redis = connector.connect(db=DBConf.REDIS_USER_ROLE_DB, decode_responses=True)
import re
from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.utils.decryption_util import MongoDataEncryption
from scripts.utils.mongo_utils import MongoCollectionBaseClass as UtilsMongoCollection
class UserCollectionKeys:
    """Field names used when querying the ``user`` collection."""

    KEY_LANGUAGE = "language"
    KEY_NAME = "name"
    KEY_USER_ID = "user_id"
    KEY_SPACE_ID = "space_id"
    KEY_USERNAME = "username"
    KEY_USER_ROLE = "userrole"
    KEY_EMAIL = "email"
class UserSchema(BaseModel):
    """
    Shape of a ``user`` document as returned by the decrypting read helpers.

    All fields are optional with benign defaults so partially-projected
    records still validate.
    """

    name: Optional[str] = ""
    space_id: Optional[str] = ""
    username: Optional[str] = ""
    password: Optional[str] = ""  # bcrypt hash, never plain text
    email: Optional[Any] = None
    phonenumber: Optional[Any] = None
    userrole: Optional[List[str]] = None
    user_type: Optional[str] = ""
    user_id: Optional[str] = ""
    created_by: Optional[str] = ""
    # pydantic deep-copies mutable defaults per instance, so {} is safe here.
    encryption_salt: Optional[Dict] = {}
    passwordReset: Optional[Dict] = {}
    failed_attempts: Optional[int] = 0
    is_user_locked: Optional[bool] = False
    last_failed_login: Optional[int] = 0
    last_logged_in: Optional[int] = 0
    last_failed_attempt: Optional[str] = ""
    expires_on: Optional[str] = ""
    disable_user: Optional[bool] = False
    default_user: Optional[bool] = False
    created_on: Optional[int] = 0
    updated_by: Optional[str] = ""
    updated_on: Optional[int] = 0
    secret: Optional[str] = ""
    password_added_on: Optional[int] = 0
    default_space: Optional[str] = ""
    fixed_delay: Optional[int] = 0
    variable_delay: Optional[int] = 0
class User(CollectionBaseClass):
    """
    Data-access wrapper for the ``user`` collection in ``catalog_meta_dub``.

    Sensitive fields are encrypted on write via MongoDataEncryption and
    decrypted on read through helpers borrowed from scripts.utils.mongo_utils.
    """

    def __init__(self, mongo_client, space_id=None):
        super().__init__(mongo_client, database="catalog_meta_dub", collection="user")
        self.space_id = space_id
        self.key_user_id = UserCollectionKeys.KEY_USER_ID
        self.key_space_id = UserCollectionKeys.KEY_SPACE_ID
        self.key_username = UserCollectionKeys.KEY_USERNAME
        self.key_email = UserCollectionKeys.KEY_EMAIL
        # Bind the decryption-aware read helpers from the utils collection
        # class to this instance so reads transparently decrypt records.
        self.find_decrypted = UtilsMongoCollection.find_decrypted.__get__(self, UtilsMongoCollection)
        self.get_decrypted_records = UtilsMongoCollection.get_decrypted_records.__get__(self, UtilsMongoCollection)
        self.data_encryption = MongoDataEncryption()

    def update_user(self, query, data):
        """
        Upsert the user document matching ``query`` with ``data``.

        NOTE(review): unlike update_one_user below, this does NOT encrypt
        ``data`` first — confirm callers pass pre-encrypted payloads.
        """
        return self.update_one(query=query, data=data, upsert=True)

    def insert_one_user(self, data):
        """Encrypt the configured fields of ``data`` and insert it as one user document."""
        data = self.data_encryption.encrypt_data(data, collection_name="user")
        return self.insert_one(data)

    def find_user(self, space_id, user_id=None, username=None, email=None, filter_dict=None):
        """
        Find one user by id/username/email and return it as a UserSchema.

        ``space_id`` is currently not used in the query — TODO confirm whether
        it should be added as a filter.  Returns the raw (falsy) lookup result
        when no user matches.
        """
        query = {}
        if user_id:
            query[self.key_user_id] = user_id
        if username:
            query[self.key_username] = username
        if email:
            # BUG FIX: the original assigned a case-insensitive regex here and
            # then immediately overwrote it with the plain value (dead store).
            # The effective behavior was an exact match, which is preserved;
            # find_base_user provides case-insensitive matching instead.
            query[self.key_email] = email
        user = self.find_decrypted(query=query, filter_dict=filter_dict)
        if user:
            return UserSchema(**user)
        return user

    @staticmethod
    def get_users_list(space_id=None):
        """Build an aggregation pipeline mapping user_id -> username, optionally scoped to a space."""
        query_json = [
            {
                "$group": {
                    "_id": None,
                    "data": {"$push": {"k": {"$ifNull": ["$user_id", ""]}, "v": {"$ifNull": ["$username", ""]}}},
                }
            },
            {"$replaceRoot": {"newRoot": {"$arrayToObject": "$data"}}},
        ]
        if space_id:
            query_json.insert(0, {"$match": {"space_id": space_id}})
        return query_json

    def users_list_by_aggregate(self, query: list):
        """Run an aggregation pipeline against the user collection."""
        return self.aggregate(pipelines=query)

    def find_user_by_space_id(self, user_id, space_id):
        """Return the raw user document for (user_id, space_id) as a dict, or the falsy result."""
        user = self.find_one(query={self.key_user_id: user_id, self.key_space_id: space_id})
        if user:
            return dict(user)
        return user

    def get_all_users(self, filter_dict=None, sort=None, skip=0, limit=None, **query):
        """Return all (non-decrypted) users matching ``query``, or [] when none."""
        users = self.find(filter_dict=filter_dict, sort=sort, skip=skip, limit=limit, query=query)
        if users:
            return list(users)
        return []

    def find_user_role_for_user_id(self, user_id, space_id):
        """Return only the ``userrole`` list for (user_id, space_id)."""
        query = {"user_id": user_id, "space_id": space_id}
        filter_dict = {"userrole": 1, "_id": 0}
        return self.find_one(query=query, filter_dict=filter_dict)

    def find_base_user(self, space_id=None, user_id=None, username=None, email=None, filter_dict=None):
        """
        Find one user by any combination of space/id/username/email.

        Email matching is case-insensitive.  NOTE(review): the email is used
        as an unescaped regex pattern, so metacharacters ('.', '+') are
        interpreted — consider re.escape(); left unchanged to preserve
        existing matching behavior.  Returns a UserSchema when the record
        validates, otherwise the raw decrypted record or the falsy result.
        """
        query = {}
        if space_id:
            query[self.key_space_id] = space_id
        if user_id:
            query[self.key_user_id] = user_id
        if username:
            query[self.key_username] = username
        if email:
            query[self.key_email] = re.compile(email, re.IGNORECASE)
        if not (user := self.find_decrypted(query=query, filter_dict=filter_dict)):
            return user
        try:
            return UserSchema(**user)
        except Exception:
            # Fall back to the raw record when it doesn't fit the schema.
            return user

    def find_by_space(
        self,
        projections=None,
        sort=None,
        query_dict=None,
        limit=None,
        skip=0,
        **filters,
    ) -> Union[Any, None]:
        """Return decrypted user records matching ``query_dict`` merged with ``filters``, or []."""
        query = {}
        if query_dict:
            query |= query_dict
        if filters:
            query.update(filters)
        records = self.find(query, projections, sort=sort, limit=limit, skip=skip)
        if records:
            records = self.get_decrypted_records(records)
        return list(records) if records else []

    def delete_one_user(self, user_id, space_id):
        """Delete the user keyed by (user_id, space_id)."""
        return self.delete_one(query={self.key_user_id: user_id, self.key_space_id: space_id})

    def update_one_user(self, query, data):
        """Encrypt the configured fields of ``data`` and upsert the matching user document."""
        data = self.data_encryption.encrypt_data(data, collection_name="user")
        return self.update_one(query=query, data=data, upsert=True)

    def get_data_by_aggregate(self, query_json: list):
        """Run an aggregation pipeline and return its decrypted results, or []."""
        if aggregate_data := list(self.aggregate(query_json)):
            aggregate_data = self.get_decrypted_records(aggregate_data)
            return aggregate_data
        return []

    def find_by_aggregate(self, query_json: list):
        """Run an aggregation pipeline and return its raw results, or []."""
        if user_by_aggregate := list(self.aggregate(query_json)):
            return user_by_aggregate
        return []

    def distinct_user(self, query_key, filter_json):
        """Return distinct values of ``query_key`` among users matching ``filter_json`` on user_id."""
        query = {self.key_user_id: filter_json}
        return self.distinct(query_key=query_key, filter_json=query)

    def find_user_by_param(self, **query):
        """Return decrypted users matching arbitrary keyword filters, or []."""
        user = self.get_decrypted_records(self.find(query))
        if not bool(user):
            user = []
        return user
from typing import Optional
from ut_mongo_util import CollectionBaseClass
class UserCollectionKeys:
    """Field names used when querying the ``user_role`` collection."""

    KEY_LANGUAGE = "language"
    KEY_NAME = "name"
    KEY_USER_ID = "user_id"
    KEY_SPACE_ID = "space_id"
    KEY_USERNAME = "username"
    KEY_USER_ROLE = "user_role_name"  # note: differs from the user-collection constant
    KEY_EMAIL = "email"
class UserRole(CollectionBaseClass):
    """Data-access wrapper for the ``user_role`` collection in ``catalog_meta_dub``."""

    def __init__(self, mongo_client, space_id=None):
        super().__init__(mongo_client, database="catalog_meta_dub", collection="user_role")
        self.space_id = space_id
        self.key_user_id = UserCollectionKeys.KEY_USER_ID
        self.key_space_id = UserCollectionKeys.KEY_SPACE_ID

    def update_user_role(self, query, data):
        """Upsert the role document matching ``query`` with ``data``."""
        return self.update_one(query=query, data=data, upsert=True)

    def find_user(self, user_id):
        """Return the document matching ``user_id`` as a dict, or the falsy lookup result."""
        record = self.find_one(query={"user_id": user_id})
        return dict(record) if record else record

    def find_user_name(self, user_id, space_id: Optional[str]):
        """Return the role name for a role id within a space, or None when absent."""
        lookup = {"user_role_id": user_id, "space_id": space_id}
        record = self.find_one(filter_dict={"user_role_name": 1, "_id": 0}, query=lookup)
        return record["user_role_name"] if record is not None else record

    @staticmethod
    def get_users_list(space_id=None):
        """Build an aggregation pipeline mapping user_id -> username, optionally scoped to a space."""
        pipeline = [
            {
                "$group": {
                    "_id": None,
                    "data": {"$push": {"k": {"$ifNull": ["$user_id", ""]}, "v": {"$ifNull": ["$username", ""]}}},
                }
            },
            {"$replaceRoot": {"newRoot": {"$arrayToObject": "$data"}}},
        ]
        if space_id:
            pipeline.insert(0, {"$match": {"space_id": space_id}})
        return pipeline

    def users_list_by_aggregate(self, query: list):
        """Run an aggregation pipeline against the collection."""
        return self.aggregate(pipelines=query)

    def find_user_by_space_id(self, user_id, space_id):
        """Return the document keyed by (user_id, space_id) as a dict, or the falsy result."""
        record = self.find_one(query={self.key_user_id: user_id, self.key_space_id: space_id})
        return dict(record) if record else record

    def find_user_role_by_id(self, user_role_id, filter_dict=None):
        """Return one role document by its user_role_id."""
        return self.find_one(query={"user_role_id": user_role_id}, filter_dict=filter_dict)

    def find_user_role_by_aggregate(self, query):
        """Run ``query`` as an aggregation; return matching docs or []."""
        results = list(self.aggregate(query))
        return results if results else []
from ut_mongo_util import CollectionBaseClass, mongo_client
class UserCollectionKeys:
    """Field names used when querying the ``user_space`` collection."""

    KEY_LANGUAGE = "language"
    KEY_NAME = "name"
    KEY_USER_ID = "user_id"
    KEY_SPACE_ID = "space_id"
    KEY_USERNAME = "username"
    KEY_USER_ROLE = "userrole"
class UserSpace(CollectionBaseClass):
    """Data-access wrapper for the ``user_space`` (user-to-space membership) collection."""

    key_username = UserCollectionKeys.KEY_USERNAME
    key_user_id = UserCollectionKeys.KEY_USER_ID
    key_language = UserCollectionKeys.KEY_LANGUAGE
    key_name = UserCollectionKeys.KEY_NAME
    key_space_id = UserCollectionKeys.KEY_SPACE_ID

    def __init__(self):
        super().__init__(
            mongo_client,
            database="catalog_meta_dub",
            collection="user_space",
        )

    def fetch_user_space(self, user_id, space_id):
        """Return the membership record for (user_id, space_id), if any."""
        return self.find_one(query={self.key_user_id: user_id, self.key_space_id: space_id})

    def fetch_user_space_with_details(self, user_id, space_id):
        """
        Return the membership record joined with the matching user document
        (name/email/username projected in), or None when there is no match.
        """
        pipeline = [
            {"$match": {"user_id": user_id, "space_id": space_id}},
            {"$lookup": {"from": "user", "localField": "user_id", "foreignField": "user_id", "as": "user_details"}},
            {"$unwind": {"path": "$user_details"}},
            {
                "$project": {
                    "space_id": 1,
                    "AccessLevel": 1,
                    "access_group_ids": 1,
                    "userrole": 1,
                    "user_id": 1,
                    "name": "$user_details.name",
                    "email": "$user_details.email",
                    "username": "$user_details.username",
                }
            },
        ]
        matches = list(self.aggregate(pipeline))
        return matches[0] if matches else None

    def find_user_role_for_user_id(self, user_id, space_id):
        """Return only the ``userrole`` list for (user_id, space_id)."""
        return self.find_one(
            query={"user_id": user_id, "space_id": space_id},
            filter_dict={"userrole": 1, "_id": 0},
        )

    def update_one_user_space(self, data, user_id, space_id):
        """Upsert the membership record for (user_id, space_id)."""
        selector = {self.key_user_id: user_id, "space_id": space_id}
        return self.update_one(query=selector, data=data, upsert=True)

    def insert_one_user(self, data):
        """Insert one membership record."""
        return self.insert_one(data)

    def delete_one_user_space(self, user_id, space_id):
        """Delete the membership record for (user_id, space_id)."""
        return self.delete_one(query={self.key_user_id: user_id, self.key_space_id: space_id})
from typing import Dict, Optional
from pydantic import BaseModel
from scripts.utils.mongo_utils import MongoCollectionBaseClass
class WorkSpacesSchema(BaseModel):
    """
    This is the Schema for the Mongo DB Collection.
    All datastore and general responses will be following the schema.
    """

    space_id: Optional[str] = ""
    space_name: Optional[str] = ""
    space_type: Optional[str] = ""  # e.g. "public" for the central workspace
    meta: Optional[Dict] = {}  # free-form metadata (updated_at/created_by/...)
    user_id: Optional[str] = ""
    source_meta: Optional[Dict] = {}
    access_token: Optional[str] = ""
    catalog_url: Optional[str] = ""
class WorkSpaces(MongoCollectionBaseClass):
    """Data-access wrapper for the ``workspaces`` collection in ``catalog_meta_dub``."""

    def __init__(self, mongo_client):
        super().__init__(mongo_client, database="catalog_meta_dub", collection="workspaces")

    @property
    def key_space_id(self):
        return "space_id"

    @property
    def key_space_name(self):
        return "space_name"

    def find_space(self, space_id=None, space_name=None, filter_dict=None):
        """
        Return one workspace matching the given id and/or name, validated
        through WorkSpacesSchema, or {} when no record matches.

        :param space_name: optional workspace name filter
        :param filter_dict: optional Mongo projection
        :param space_id: optional workspace id filter
        :return: schema-validated dict, or {}
        """
        query = {}
        if space_id:
            query[self.key_space_id] = space_id
        if space_name:
            query[self.key_space_name] = space_name
        record = self.find_one(query=query, filter_dict=filter_dict)
        return WorkSpacesSchema(**record).dict() if record else {}

    def find_space_by_query(self, query, filter_dict=None):
        """Return the workspaces matching ``query``, or [] for a falsy result."""
        records = self.find(query=query, filter_dict=filter_dict)
        return records if records else []

    def fetch_space_details(self):
        """Return a mapping of space_id -> space_name across all workspaces, or {}."""
        records = self.find(query={}, filter_dict={self.key_space_id: 1, "_id": 0, self.key_space_name: 1})
        if not records:
            return {}
        return {rec.get(self.key_space_id): rec.get(self.key_space_name) for rec in records}

    def insert_one_space(self, data):
        """Insert one workspace document."""
        return self.insert_one(data)

    def delete_one_space(self, space_id):
        """Delete the workspace with ``space_id``; returns False for a falsy id."""
        if not space_id:
            return False
        return self.delete_one({self.key_space_id: space_id})

    def get_space_data_by_aggregate(self, query: list):
        """Run an aggregation pipeline and return the results as a list."""
        return list(self.aggregate(pipelines=query))

    def update_one_space(self, data, space_id, upsert=False):
        """
        Update (optionally upsert) the workspace keyed by ``space_id``.

        :param data: fields to write
        :param upsert: create the document when it does not exist
        :param space_id: workspace id selector
        :return: the update result
        """
        return self.update_one(data=data, upsert=upsert, query={"space_id": space_id})

    def delete_workspaces(self, space_id_list):
        """Delete every workspace whose id is in ``space_id_list``; return the removed count."""
        outcome = self.delete_many({self.key_space_id: {"$in": space_id_list}})
        return outcome.deleted_count
import re
from copy import deepcopy
import bcrypt
from ut_mongo_util import mongo_client
from scripts.config.default_meta_catalog_constants import DEFAULT_USER
from scripts.db.user import User
from scripts.errors import CustomError
from scripts.logging import logger
class UserCreation:
    """Creates the default catalog user, hashing its password with bcrypt."""

    def __init__(self):
        """Set up the user-collection connection used by this creator."""
        self.user_conn = User(mongo_client=mongo_client)

    @staticmethod
    def validate_password_strength(password):
        """
        Return True when ``password`` is 8-64 characters long and contains a
        lowercase letter, an uppercase letter, a digit and one of !@#$%^&*,
        with no whitespace anywhere.
        """
        try:
            logger.info("Validate password strength")
            return all(
                (
                    7 < len(password) < 65,
                    re.search("[a-z]", password) is not None,
                    re.search(r"\d", password) is not None,
                    re.search("[A-Z]", password) is not None,
                    re.search("[!@#$%^&*]", password) is not None,
                    re.search("\\s", password) is None,
                )
            )
        except Exception as e:
            logger.error(f"Error occurred while validating the password strength : {str(e)}")
            return False

    def encrypt_password(self, password):
        """
        Validate then bcrypt-hash ``password``.

        :raises CustomError: when the password fails the strength rules
        :return: the bcrypt hash decoded to str
        """
        if not self.validate_password_strength(password):
            raise CustomError(
                "Password should contain minimum of 8 characters with at least a symbol, "
                "one upper and one lower case letters and a number"
            )
        hashed = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt())
        return hashed.decode() if isinstance(hashed, bytes) else hashed

    def create_default_user(self, password):
        """Insert the default user (with hashed password) unless it already exists."""
        try:
            admin_user = deepcopy(DEFAULT_USER)
            admin_user["password"] = self.encrypt_password(password)
            existing = self.user_conn.find_user_by_space_id(
                user_id=admin_user.get("user_id"), space_id=admin_user.get("space_id")
            )
            if not existing:
                self.user_conn.insert_one_user(admin_user)
        except Exception as e:
            logger.error(f"Error creating public default data {str(e)}")
import json
import os
from ut_mongo_util import mongo_client
from scripts.config.default_meta_catalog_constants import DEFAULT_USER_ROLES
from scripts.db.user import User
from scripts.db.user_role import UserRole
from scripts.db.redis_connection import user_role_permissions_redis
from scripts.logging import logger
class UserRolesPermissionKeys:
    """Permission keys retained when caching role permissions in Redis."""

    # Comma-separated override via the USER_ROLE_KEYS env var; the built-in
    # default is used otherwise.  dict.fromkeys() de-duplicates the parsed
    # keys while preserving order (the old default listed "attachLicense"
    # twice), keeping the attribute a list for backward compatibility.
    required_keys = list(
        dict.fromkeys(
            each_key.strip()
            for each_key in os.environ.get(
                "USER_ROLE_KEYS",
                default="edit, create, view, publish, delete, "
                "clone, share,attachLicense, operator,"
                "auditLogs, screenshotRestriction, dashboard, title, auditLogsDownload, attachLicense, feedback, "
                "uploadLicense",
            ).split(",")
        )
    )
class UserRoleCreation:
    """Creates the default user roles and mirrors their permissions into Redis."""

    def __init__(self):
        """Set up the role and user collection connections used by this creator."""
        self.user_role_conn = UserRole(mongo_client=mongo_client)
        self.user_conn = User(mongo_client=mongo_client)

    @staticmethod
    def save_permissions(p_id, permissions, user_role_id):
        """
        Cache each permission group of a role in Redis under the hash key
        "<p_id>__<user_role_id>", keeping only the permission flags listed in
        UserRolesPermissionKeys.required_keys.
        """
        for permission_name, permissions_allowed in permissions.items():
            permissions_filtered = {
                key: value
                for key, value in permissions_allowed.items()
                if key in UserRolesPermissionKeys.required_keys
            }
            user_role_permissions_redis.hset(
                f"{p_id}__{user_role_id}", permission_name, json.dumps(permissions_filtered)
            )

    def user_role_redis_update(self):
        """
        Refresh Redis with the permissions of every role referenced by any
        user document.  Returns True on success, False on failure.
        """
        try:
            logger.info("Updating redis with user role details")
            space_details = self.user_conn.find({}, {"_id": 0, "space_id": 1, "userrole": 1})
            for each_space in space_details:
                # Reuse the existing connection instead of constructing a new
                # UserRole wrapper on every iteration.
                all_roles = self.user_role_conn.find({"user_role_id": {"$in": each_space["userrole"]}})
                for user_r in all_roles:
                    self.save_permissions(each_space["space_id"], user_r["user_role_permissions"], user_r["user_role_id"])
            logger.info("Updated redis with user role details")
            return True
        except Exception as e:
            # BUG FIX: logger.exception was called with a stray positional arg
            # ("msg", str(e)) but no %s placeholder, which breaks the logging
            # %-formatting; interpolate the error into the message instead.
            logger.exception(f"Failed to update redis with user roles: {e}")
            return False

    def create_user_role(self):
        """Upsert each of the DEFAULT_USER_ROLES into the user_role collection."""
        try:
            for user_role in DEFAULT_USER_ROLES:
                self.user_role_conn.update_user_role({"user_role_id": user_role.get("user_role_id")}, user_role)
        except Exception as e:
            logger.error(f"Error creating public default data {str(e)}")
import json
import logging
import time
from copy import deepcopy
from ut_mongo_util import mongo_client
from scripts.config.default_meta_catalog_constants import DEFAULT_SPACE
from scripts.db.user_space import UserSpace
from scripts.db.workspaces import WorkSpaces
from scripts.db.redis_connection import space_db
from scripts.errors import WorkspaceNameExistError
from scripts.schema import CreateWorkspace
class WorkspaceCreation:
    """Creates the central (global catalog) workspace and its Redis metadata."""

    def __init__(self):
        """Set up the workspace and user-space collection connections."""
        self.workspace_conn = WorkSpaces(mongo_client=mongo_client)
        self.user_space_conn = UserSpace()

    def validate_name_catalog(self, workspace_name, space_id):
        """Return any existing workspace matching name/id ({} when none); None on error."""
        try:
            return self.workspace_conn.find_space(space_name=workspace_name, space_id=space_id)
        except Exception as e:
            logging.error(f"Error occurred in the validate name in catalog {str(e)}")

    @staticmethod
    def set_or_update_redis(redis_client, add_prefix_to_database, space_id):
        """
        Checks if a key exists in Redis and inserts or updates it with the given value.
        Dynamically builds the value based on `add_prefix_to_database` and `space_id`.
        :param redis_client: Redis client instance.
        :param add_prefix_to_database: Boolean flag indicating if the prefix should be added.
        :param space_id: The space_id used as the prefix for the database.
        :raises: re-raises any Redis error after logging it.
        """
        key = space_id
        # Build the dynamic source_meta dictionary.
        source_meta = {
            "add_prefix_to_database": add_prefix_to_database,
            "prefix": space_id if add_prefix_to_database else "",
        }
        # Prepare the JSON object to store in Redis.
        value_json = json.dumps({"source_meta": source_meta})
        try:
            redis_client.set(key, value_json)
            logging.info(f"Key '{key}' has been set or updated successfully.")
        except Exception as e:
            logging.error(f"Error occurred while setting/updating key '{key}': {e}")
            raise

    def global_catalog_workspace_creation(self):
        """
        Ensure the central workspace (space_099) exists: create/update the
        workspace document, its Redis source_meta entry and the default
        user-space record.  Returns {"space_id": ...}, or None on unexpected
        errors (which are logged).
        """
        try:
            user_id = "user_097"
            data = deepcopy(
                CreateWorkspace(
                    space_id="space_099", space_name="Central Workspace", space_type="public", user_id=user_id
                )
            )
            existing_space_details = self.validate_name_catalog(workspace_name=data.space_name, space_id=data.space_id)
            if existing_space_details:
                logging.debug(f"It is already existing space_name is {str(data.space_name)}")
                return {"space_id": data.space_id}
            data.meta.update({"updated_at": int(time.time() * 1000), "created_by": user_id, "updated_by": user_id})
            count = self.workspace_conn.update_one_space(data.dict(), data.space_id, upsert=True)
            logging.debug(f"Updated Count {str(count)} ")
            self.set_or_update_redis(space_db, add_prefix_to_database=False, space_id=data.space_id)
            self.user_space_conn.update_one_user_space(
                DEFAULT_SPACE, DEFAULT_SPACE.get("user_id"), DEFAULT_SPACE.get("space_id")
            )
            return {"space_id": data.space_id}
        except WorkspaceNameExistError:
            # BUG FIX: previously `raise WorkspaceNameExistError` re-raised the
            # bare class, creating a new message-less instance; a bare `raise`
            # propagates the original exception with its traceback intact.
            raise
        except Exception as e:
            logging.error(f"Error occurred in the global catalog creation due to {str(e)}")
\ No newline at end of file
class ILensErrors(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
"""
Base Error Class
"""
class ILensErrorsWithoutMessage(Exception):
"""Generic iLens Error"""
class ErrorMessages:
ERROR001 = "Authentication Failed. Please verify token"
ERROR002 = "Signature Expired"
ERROR003 = "Signature Not Valid"
ERROR004 = "User Record Not Found"
WORKSPACE_NAME_EXIST_ERROR = "Workspace Name Exist. Please Use different name"
WORKSPACE_CATALOG_URL_ERROR = "Please add valid catalog url"
class JobCreationError(Exception):
    """
    Raised when a Job Creation throws an exception.
    Job Creation happens by adding a record to Mongo.
    """
# --- Plain marker exceptions (no message handling of their own) ---
class UnknownError(Exception):
    pass
class DuplicateSpaceNameError(Exception):
    pass
class KairosDBError(Exception):
    pass
class UnauthorizedError(Exception):
    pass
class ImageValidation(Exception):
    pass
class ILensError(Exception):
    pass
class NameExists(Exception):
    pass
# --- Errors rooted at ILensError so callers can catch the family at once ---
class InputRequestError(ILensError):
    pass
class IllegalTimeSelectionError(ILensError):
    pass
class DataNotFound(Exception):
    pass
class AuthenticationError(ILensError):
    """
    JWT Authentication Error
    """
class JWTDecodingError(Exception):
    pass
class DuplicateReportNameError(Exception):
    pass
class PathNotExistsException(Exception):
    pass
class ImplementationError(Exception):
    pass
class UserRoleNotFoundException(Exception):
    pass
class CustomError(Exception):
    pass
# --- Errors rooted at ILensErrors: constructed with an explicit message ---
class IllegalToken(ILensErrors):
    pass
class InvalidPasswordError(ILensErrors):
    pass
class UserNotFound(ILensErrors):
    pass
class TooManyRequestsError(Exception):
    pass
class FixedDelayError(ILensErrors):
    pass
class VariableDelayError(ILensErrors):
    pass
class LicenceValidationError(Exception):
    pass
# NOTE(review): not an Exception subclass — a constants holder for custom-app
# error strings. Cannot be raised; confirm that is intentional.
class CustomAppError:
    FAILED_TO_SAVE = "Failed to save app"
class WorkspaceNameExistError(ILensErrorsWithoutMessage):
    pass
class GlobalCatalogError(Exception):
    """
    Generic global-catalog error carrying a human-readable message.

    The original placed a second docstring as a stray string statement after
    __init__ (a no-op); it is folded into this one. Uses super().__init__
    instead of calling Exception.__init__ explicitly.
    """

    def __init__(self, msg):
        super().__init__(msg)
# --- Mongo exception hierarchy ---
# ILensException -> MongoException -> {connection, query, encryption} -> specific.
# Catching MongoException covers every Mongo-related failure below.
class ILensException(Exception):
    pass
class MongoException(ILensException):
    pass
class MongoConnectionException(MongoException):
    pass
class MongoQueryException(MongoException):
    pass
class MongoEncryptionException(MongoException):
    pass
# Specific query failures, all catchable as MongoQueryException.
class MongoRecordInsertionException(MongoQueryException):
    pass
class MongoFindException(MongoQueryException):
    pass
class MongoDeleteException(MongoQueryException):
    pass
class MongoUpdateException(MongoQueryException):
    pass
class MongoUnknownDatatypeException(MongoEncryptionException):
    pass
# NOTE(review): "Distict" is a typo for "Distinct" — kept as-is because
# renaming the class would break existing except/raise sites.
class MongoDistictQueryException(MongoException):
    pass
class MongoFindAndReplaceException(MongoException):
    pass
class MongoObjectDeserializationException(MongoException):
    pass
import logging
import os
from logging import StreamHandler
from logging.handlers import RotatingFileHandler, SocketHandler
import yaml
from scripts.config import LoggVariables
# this method is to read the configuration from backup.conf
def read_configuration(file_name):
    """
    Load a YAML configuration file.

    :param file_name: path to the YAML file
    :return: the parsed configuration (typically a dict), or None when the
        file cannot be parsed. File-open errors (e.g. FileNotFoundError)
        propagate to the caller, as before.
    """
    with open(file_name) as stream:
        try:
            return yaml.safe_load(stream)
        except yaml.YAMLError as e:
            # Narrowed from a bare `except Exception`: only parse failures are
            # expected here. Logging isn't configured yet at this point in
            # module import, so print is the only available fallback.
            print(f"Failed to load Configuration. Error: {e}")
            return None
# Read logger settings once at import time from the packaged YAML config.
config = read_configuration("scripts/logging/logger_conf.yml")
# NOTE(review): if the YAML failed to load, read_configuration returns None
# and this subscript raises TypeError at import — confirm fail-fast is intended.
logging_config = config["logger"]
# Environment/config-driven level (LoggVariables) overrides the YAML's level.
logging_config["level"] = LoggVariables.LOG_LEVEL
def add_logging_level(level_name, level_num, method_name=None):
    """
    Register a new level with the `logging` module and the active logger class.

    `level_name` becomes a module attribute holding `level_num`, and
    `method_name` (default: `level_name.lower()`) becomes a convenience log
    method on both the `logging` module and `logging.getLoggerClass()`.

    Raises AttributeError rather than clobbering any name that already exists
    on the logging module or the logger class.

    Example:
        add_logging_level('TRACE', logging.DEBUG - 5)
        logging.getLogger(__name__).setLevel("TRACE")
        logging.getLogger(__name__).trace('that worked')
        logging.trace('so did this')
    """
    method_name = method_name or level_name.lower()
    logger_cls = logging.getLoggerClass()

    # Refuse to overwrite anything already defined.
    if hasattr(logging, level_name):
        raise AttributeError(f"{level_name} already defined in logging module")
    if hasattr(logging, method_name):
        raise AttributeError(f"{method_name} already defined in logging module")
    if hasattr(logger_cls, method_name):
        raise AttributeError(f"{method_name} already defined in logger class")

    def _instance_log(self, message, *args, **kwargs):
        # Mirrors Logger.debug/info: respect the effective level before logging.
        if self.isEnabledFor(level_num):
            self._log(level_num, message, args, **kwargs)

    def _module_log(message, *args, **kwargs):
        logging.log(level_num, message, *args, **kwargs)

    logging.addLevelName(level_num, level_name)
    setattr(logging, level_name, level_num)
    setattr(logger_cls, method_name, _instance_log)
    setattr(logging, method_name, _module_log)
def get_logger():
    """
    Create and configure the shared "ilens" logger.

    Handlers are built from logging_config["handlers"]; the file and console
    handlers are additionally gated by LoggVariables.ENABLE_FILE_LOG /
    ENABLE_CONSOLE_LOG. Returns the configured logger instance.
    """
    __logger__ = logging.getLogger("ilens")
    # Guard: add_logging_level raises AttributeError if the level already
    # exists, so a second call to get_logger (or a module re-import) must not
    # try to register QTRACE again.
    if not hasattr(logging, "QTRACE"):
        add_logging_level("QTRACE", logging.DEBUG - 5)
    __logger__.setLevel(logging_config["level"].upper())
    log_formatter = "%(asctime)s - %(levelname)-6s - [%(threadName)5s:%(funcName)5s():" + "%(lineno)s] - %(message)s"
    time_format = "%Y-%m-%d %H:%M:%S"
    file_path = LoggVariables.LOGS_MODULE_PATH
    formatter = logging.Formatter(log_formatter, time_format)
    for each_handler in logging_config["handlers"]:
        temp_handler = None
        if each_handler["type"] == "RotatingFileHandler" and LoggVariables.ENABLE_FILE_LOG:
            # exist_ok avoids the check-then-create race of the original
            # `if not os.path.exists(...): os.makedirs(...)`.
            os.makedirs(file_path, exist_ok=True)
            log_file = os.path.join(file_path, f"{logging_config['name']}.log")
            temp_handler = RotatingFileHandler(
                log_file, maxBytes=each_handler["max_bytes"], backupCount=each_handler["back_up_count"]
            )
            temp_handler.setFormatter(formatter)
        elif each_handler["type"] == "SocketHandler":
            # SocketHandler pickles the raw LogRecord, so no formatter is set.
            temp_handler = SocketHandler(each_handler["host"], each_handler["port"])
        elif each_handler["type"] == "StreamHandler" and LoggVariables.ENABLE_CONSOLE_LOG:
            temp_handler = StreamHandler()
            temp_handler.setFormatter(formatter)
        if temp_handler:
            __logger__.addHandler(temp_handler)
    return __logger__


logger = get_logger()
logger:
name: catalog_default_meta
level: INFO
handlers:
- type: RotatingFileHandler
file_path: data/catalog_default_meta/logs/
max_bytes: 100000000
back_up_count: 5
- type: SocketHandler
host: localhost
port: 23582
- type: StreamHandler
name: catalog_default_meta
import time
from typing import Optional
from pydantic import Field, BaseModel
class CreateWorkspace(BaseModel):
    """Request payload for creating a workspace/space."""

    space_id: str
    space_name: str
    space_type: str
    user_id: str = Field(default="user_097")
    # default_factory instead of default: a plain `default={...}` is a single
    # mutable dict shared by every instance, and `int(time.time())` in it was
    # evaluated once at import time, freezing created_at for the process.
    source_meta: Optional[dict] = Field(default_factory=lambda: {"add_prefix_to_database": False})
    meta: Optional[dict] = Field(default_factory=lambda: {"created_at": int(time.time())})
    access_token: Optional[str] = Field(default=None)
    catalog_url: Optional[str] = Field(default=None)
import json
from functools import lru_cache
@lru_cache(maxsize=1024)
def get_db_name(redis_client, space_id: str, database: str, delimiter="__"):
    """
    Resolve the physical database name for a logical space.

    Returns `database` unchanged when no space_id is given or the space record
    is empty / has no prefix flag; otherwise "<prefix><delimiter><database>",
    where the prefix is source_meta["prefix"] or, failing that, the space_id.

    :raises ValueError: if `space_id` is not present in redis.

    NOTE(review): results are memoized for the process lifetime, so changes to
    a space's redis record are not picked up until restart — confirm that
    staleness is acceptable. maxsize=1024 bounds what was previously an
    unbounded cache.
    """
    if not space_id:
        return database
    raw = redis_client.get(space_id)
    if raw is None:
        raise ValueError(f"Unknown Space, Space ID: {space_id} Not Found!!!")
    space_record = json.loads(raw)
    if not space_record:
        return database
    source_meta = space_record.get("source_meta", {})
    # Only apply the prefix when the space explicitly opts in.
    if source_meta.get("add_prefix_to_database"):
        prefix_name = source_meta.get("prefix") or space_id
        return f"{prefix_name}{delimiter}{database}"
    return database
@lru_cache(maxsize=1024)
def get_redis_db_prefix(redis_client, space_id: str, delimiter="__"):
    """
    Resolve the redis key prefix ("<prefix><delimiter>") for a logical space.

    Returns False when no space_id is given, the space record is missing or
    empty, or the space has not opted into prefixing — unlike get_db_name,
    an unknown space is NOT an error here.

    NOTE(review): results are memoized for the process lifetime (stale until
    restart). maxsize=1024 bounds what was previously an unbounded cache.
    """
    if not space_id:
        return False
    raw = redis_client.get(space_id)
    if raw is None:
        return False
    space_record = json.loads(raw)
    if not space_record:
        return False
    source_meta = space_record.get("source_meta", {})
    # Only apply the prefix when the space explicitly opts in.
    if source_meta.get("add_prefix_to_database"):
        prefix_name = source_meta.get("prefix") or space_id
        return f"{prefix_name}{delimiter}"
    return False
def get_space_data_from_redis(redis_client, space_id: str):
    """
    Fetch and JSON-decode the space record for `space_id`.

    :raises ValueError: when the key is absent from redis.
    :return: the decoded record, or None when the stored value decodes to
        something falsy (e.g. "{}" or "null") — matching the original's
        implicit-None behavior.
    """
    raw = redis_client.get(space_id)
    if raw is None:
        raise ValueError(f"Unknown Space, Space ID: {space_id} Not Found!!!")
    decoded = json.loads(raw)
    return decoded if decoded else None
This diff is collapsed.
import jwt
from jwt.exceptions import (
ExpiredSignatureError,
InvalidSignatureError,
MissingRequiredClaimError,
)
from scripts.config import KeyPath
from scripts.errors import AuthenticationError, ErrorMessages
from scripts.logging import logger
class Secrets:
    # SECURITY NOTE(review): these are hardcoded credentials/keys committed to
    # source control. They should be loaded from environment variables or a
    # secret store, and the committed values rotated.
    LOCK_OUT_TIME_MINS = 30
    # Passed as jwt.decode(..., leeway=...) below; PyJWT interprets leeway in
    # SECONDS, so despite the name this grants 10 seconds — TODO confirm intent.
    leeway_in_mins = 10
    unique_key = "45c37939-0f75"
    token = "8674cd1d-2578-4a62-8ab7-d3ee5f9a"
    issuer = "ilens"
    # RS256 = RSA signatures; requires the key pair referenced by KeyPath.
    alg = "RS256"
    SECRET_FOR_SUPPORT_LENS = "WeSupport24X7UnifyTwinX#"
    ISS = "unifytwin"
    AUD = "supportlens"
    signature_key = "kliLensKLiLensKL"
    signature_key_alg = ["HS256"]
class JWT:
    """RS256 JWT encode/decode/validate using the key pair from KeyPath."""

    def __init__(self):
        self.max_login_age = Secrets.LOCK_OUT_TIME_MINS
        self.issuer = Secrets.issuer
        self.alg = Secrets.alg
        self.public = KeyPath.PUBLIC
        self.private = KeyPath.PRIVATE

    def encode(self, payload):
        """Sign `payload` with the private key; logs and re-raises on failure."""
        try:
            # `with` closes the file; the original's extra `finally: f.close()`
            # raised NameError (masking the real error) whenever open() failed.
            with open(self.private) as key_file:
                key = key_file.read()
            return jwt.encode(payload, key, algorithm=self.alg)
        except Exception as e:
            logger.exception(f"Exception while encoding JWT: {str(e)}")
            raise

    def decode(self, token):
        """Decode and verify `token` with the public key; logs and re-raises on failure."""
        try:
            with open(self.public) as key_file:
                key = key_file.read()
            # PyJWT documents `algorithms` as a list of allowed algorithms.
            return jwt.decode(token, key, algorithms=[self.alg])
        except Exception as e:
            # Fixed: the original logged "encoding" in this decode path.
            logger.exception(f"Exception while decoding JWT: {str(e)}")
            raise

    def validate(self, token):
        """
        Decode `token`, requiring the `exp` and `iss` claims.

        :return: the decoded payload.
        :raises AuthenticationError: on invalid signature, expiry, or a
            missing required claim (mapped to ErrorMessages strings).
        """
        try:
            with open(self.public) as key_file:
                key = key_file.read()
            # NOTE(review): PyJWT leeway is in seconds; Secrets.leeway_in_mins
            # therefore grants 10 s, not 10 min — confirm intent.
            return jwt.decode(
                token,
                key,
                algorithms=[self.alg],
                leeway=Secrets.leeway_in_mins,
                options={"require": ["exp", "iss"]},
            )
        except InvalidSignatureError:
            raise AuthenticationError(ErrorMessages.ERROR003)
        except ExpiredSignatureError:
            raise AuthenticationError(ErrorMessages.ERROR002)
        except MissingRequiredClaimError:
            # NOTE(review): a missing claim maps to the "Signature Expired"
            # message (ERROR002) — kept for compatibility, but looks wrong.
            raise AuthenticationError(ErrorMessages.ERROR002)
        except Exception as e:
            logger.exception(f"Exception while validating JWT: {str(e)}")
            raise
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment