Commit aae4a93f authored by tarun2512's avatar tarun2512

Your commit message here

parents
DESTINATION_MONGO_URI=mongodb://ilens:ilens4321@192.168.0.220:31589/?authSource=admin&directConnection=true
#MONGO_URI=mongodb://admin:iLens%23QAv513@192.168.0.217:30904/?authMechanism=DEFAULT&directConnection=true
SOURCE_MONGO_URI=mongodb://admin:UtAdm%23Mong771385@192.168.0.207:8098/?authMechanism=DEFAULT&directConnection=true
DESTINATION_REDIS_URI=redis://admin:iLensDevRedis@192.168.0.220:32642
#REDIS_URI=redis://admin:iLensQARedis@192.168.0.217:30910
SOURCE_REDIS_URI=redis://admin:iLensProdRedis@192.168.0.207:8213
SPACE_ID=space_127
PROJECT_ID=project_102
-----BEGIN RSA PRIVATE KEY-----
MIICWwIBAAKBgQClilTaeHq6Zc+kWHCNl1O0btGRm7ct3O5zqWx1mwwLUWH14eft
Hi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULfENhwd/D7P3mnoRlktPT2t+tt
RRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw2hcqOYe/NGTkmm1PswIDAQAB
AoGAZPARR1l5NBkKYGKQ1rU0E+wSmx+AtVVmjF39RUSyNmB8Q+poebwSgsr58IKt
T6Yq6Tjyl0UAZTGmferCK0xJJrqyP0hMn4nNNut+acWMKyt+9YrA2FO+r5Jb9JuT
SK35xXnM4aZLGppgWJxRzctpIz+qkf6oLRSZme0AuiqcwYECQQDY+QDL3wbWplRW
bze0DsZRMkDAkNY5OCydvjte4SR/mmAzsrpNrS5NztWbaaQrefoPbsdYBPbd8rS7
C/s/0L1zAkEAw1EC5zt2STuhkcKLa/tL+bk8WHHHtf19aC9kBj1TvWBFh+JojWCo
86iK5fLcHzhyQx5Qi3E9LG2HvOWhS1iUwQJAKbEHHyWW2c4SLJ2oVXf1UYrXeGkc
UNhjclgobl3StpZCYAy60cwyNo9E6l0NR7FjhG2j7lzd1t4ZLkvqFmQU0wJATLPe
yQIwBLh3Te+xoxlQD+Tvzuf3/v9qpWSfClhBL4jEJYYDeynvj6iry3whd91J+hPI
m8o/tNfay5L+UcGawQJAAtbqQc7qidFq+KQYLnv5gPRYlX/vNM+sWstUAqvWdMze
JYUoTHKgiXnSZ4mizI6/ovsBOMJTb6o1OJCKQtYylw==
-----END RSA PRIVATE KEY-----
-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQClilTaeHq6Zc+kWHCNl1O0btGR
m7ct3O5zqWx1mwwLUWH14eftHi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULf
ENhwd/D7P3mnoRlktPT2t+ttRRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw
2hcqOYe/NGTkmm1PswIDAQAB
-----END PUBLIC KEY-----
-----BEGIN RSA PRIVATE KEY-----
MIICWwIBAAKBgQClilTaeHq6Zc+kWHCNl1O0btGRm7ct3O5zqWx1mwwLUWH14eft
Hi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULfENhwd/D7P3mnoRlktPT2t+tt
RRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw2hcqOYe/NGTkmm1PswIDAQAB
AoGAZPARR1l5NBkKYGKQ1rU0E+wSmx+AtVVmjF39RUSyNmB8Q+poebwSgsr58IKt
T6Yq6Tjyl0UAZTGmferCK0xJJrqyP0hMn4nNNut+acWMKyt+9YrA2FO+r5Jb9JuT
SK35xXnM4aZLGppgWJxRzctpIz+qkf6oLRSZme0AuiqcwYECQQDY+QDL3wbWplRW
bze0DsZRMkDAkNY5OCydvjte4SR/mmAzsrpNrS5NztWbaaQrefoPbsdYBPbd8rS7
C/s/0L1zAkEAw1EC5zt2STuhkcKLa/tL+bk8WHHHtf19aC9kBj1TvWBFh+JojWCo
86iK5fLcHzhyQx5Qi3E9LG2HvOWhS1iUwQJAKbEHHyWW2c4SLJ2oVXf1UYrXeGkc
UNhjclgobl3StpZCYAy60cwyNo9E6l0NR7FjhG2j7lzd1t4ZLkvqFmQU0wJATLPe
yQIwBLh3Te+xoxlQD+Tvzuf3/v9qpWSfClhBL4jEJYYDeynvj6iry3whd91J+hPI
m8o/tNfay5L+UcGawQJAAtbqQc7qidFq+KQYLnv5gPRYlX/vNM+sWstUAqvWdMze
JYUoTHKgiXnSZ4mizI6/ovsBOMJTb6o1OJCKQtYylw==
-----END RSA PRIVATE KEY-----
-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQClilTaeHq6Zc+kWHCNl1O0btGR
m7ct3O5zqWx1mwwLUWH14eftHi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULf
ENhwd/D7P3mnoRlktPT2t+ttRRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw
2hcqOYe/NGTkmm1PswIDAQAB
-----END PUBLIC KEY-----
from dotenv import load_dotenv
import os
from scripts.core.migrate_asset_models import MigrateAssetModel

# Pull SPACE_ID / PROJECT_ID (and the Mongo/Redis URIs used elsewhere) from .env.
load_dotenv()

# Target space/project for the migration; None when the variables are unset.
SPACE_ID = os.environ.get("SPACE_ID")
PROJECT_ID = os.environ.get("PROJECT_ID")

if __name__ == "__main__":
    # Entry point: migrate asset-model details for the configured space/project.
    # NOTE(review): presumably MigrateAssetModel reads DB URIs from the env — confirm.
    MigrateAssetModel(space_id=SPACE_ID, project_id=PROJECT_ID).insert_asset_model_details()
bcrypt~=4.0.1
cryptography==43.0.1
pydantic~=2.7.3
python-dotenv==1.0.1
ut-mongo-util[stable,encryption]==1.1.1
ut-redis-connector[stable]==0.3.1
\ No newline at end of file
import os
import pathlib
import shutil
from typing import Optional, Any
from dotenv import load_dotenv
from pydantic.v1 import Field, root_validator, BaseSettings
load_dotenv()
class _LoggVariables(BaseSettings):
    """Logging configuration, populated from environment variables / .env."""

    LOG_LEVEL: str = Field(default="DEBUG")
    ENABLE_FILE_LOG: Optional[Any] = Field(default=False)
    ENABLE_CONSOLE_LOG: Optional[Any] = Field(default=True)
    # Directory used for file logging output.
    LOGS_MODULE_PATH: Optional[pathlib.Path] = Field(default="/code/data/default_catalog_meta_logs")
class _Databases(BaseSettings):
    """Source/destination database connection settings from the environment."""

    SOURCE_MONGO_URI: Optional[str]
    DESTINATION_MONGO_URI: Optional[str]
    SOURCE_REDIS_URI: Optional[str]
    DESTINATION_REDIS_URI: Optional[str]
    # Redis logical DB numbers for space and user-role lookups.
    REDIS_SPACE_DB: int = Field(default=18)
    REDIS_USER_ROLE_DB: Optional[int] = Field(default=21)
class _Security(BaseSettings):
    """Encryption-related settings for Mongo field encryption."""

    # JSON file mapping collections to their encrypted keys.
    ENCRYPTION_CONSTANTS_FILE_PATH: str = "scripts/config/mongo_encryption_constants.json"
    USER_ENCRYPTION: bool = Field(default=True)
class _KeyPath(BaseSettings):
    """Locations of the RSA key pair; bundled defaults are copied in on first run."""

    KEYS_PATH: Optional[pathlib.Path] = Field(default="data/keys")
    PUBLIC: Optional[pathlib.Path]
    PRIVATE: Optional[pathlib.Path]

    @root_validator(allow_reuse=True)
    def assign_values(cls, values):
        """Ensure both key files exist under KEYS_PATH and record their paths.

        If either file is missing, both defaults from ``assets/keys`` are
        copied so the public/private pair stays matched.
        """
        keys_path = values.get("KEYS_PATH")
        public_path = os.path.join(keys_path, "public")
        private_path = os.path.join(keys_path, "private")
        if not os.path.isfile(public_path) or not os.path.isfile(private_path):
            # exist_ok avoids a crash if the directory appears between the
            # check and the creation (the original checked then created).
            os.makedirs(keys_path, exist_ok=True)
            shutil.copy(os.path.join("assets", "keys", "public"), public_path)
            shutil.copy(os.path.join("assets", "keys", "private"), private_path)
        values["PUBLIC"] = public_path
        values["PRIVATE"] = private_path
        return values
# Singleton settings instances exported as this module's public API.
DBConf = _Databases()
LoggVariables = _LoggVariables()
Security = _Security()
KeyPath = _KeyPath()

__all__ = [
    "DBConf",
    "LoggVariables",
    "Security",
    "KeyPath"
]
\ No newline at end of file
from datetime import datetime, timezone
# Default roles seeded for a new space: Admin (full access), Approver
# (approval inbox + artifacts only) and Operator (artifacts only).
# NOTE(review): access_levels is True for every module on all three roles,
# while the fine-grained flags live under user_role_permissions — confirm
# access_levels is intentionally identical across roles.
DEFAULT_USER_ROLES = [
    {
        "space_id": "space_099",
        "type": "edit",
        "user_role_id": "user_role_096",
        "access_levels": {
            "userManagement": {
                "access_level": True
            },
            "approvalInbox": {
                "access_level": True
            },
            "artifacts": {
                "access_level": True
            },
        },
        "user_role_description": "Admin",
        "user_role_name": "Admin",
        "user_role_permissions": {
            "userManagement": {
                "key": "userManagement",
                "name": "User Management",
                "create": True,
                "delete": True,
                "edit": True,
                "view": True,
                "children": {
                    "users": {
                        "key": "users",
                        "name": "User",
                        "create": True,
                        "delete": True,
                        "edit": True,
                        "view": True
                    }
                }
            },
            "approvalInbox": {
                "key": "approvalInbox",
                "name": "Approval Inbox",
                "create": True,
                "delete": True,
                "edit": True,
                "view": True
            },
            "artifacts": {
                "key": "artifacts",
                "name": "artifacts",
                "create": True,
                "delete": True,
                "edit": True,
                "view": True
            }
        },
        "catalogPermission": True
    },
    {
        # Approver: no user management, full approval-inbox and artifact rights.
        "space_id": "space_099",
        "type": "edit",
        "user_role_id": "user_role_097",
        "access_levels": {
            "userManagement": {
                "access_level": True
            },
            "approvalInbox": {
                "access_level": True
            },
            "artifacts": {
                "access_level": True
            },
        },
        "user_role_description": "Approver",
        "user_role_name": "Approver",
        "user_role_permissions": {
            "userManagement": {
                "key": "userManagement",
                "name": "User Management",
                "create": False,
                "delete": False,
                "edit": False,
                "view": False,
                "children": {
                    "users": {
                        "key": "users",
                        "name": "User",
                        "create": False,
                        "delete": False,
                        "edit": False,
                        "view": False
                    }
                }
            },
            "approvalInbox": {
                "key": "approvalInbox",
                "name": "Approval Inbox",
                "create": True,
                "delete": True,
                "edit": True,
                "view": True
            },
            "artifacts": {
                "key": "artifacts",
                "name": "artifacts",
                "create": True,
                "delete": True,
                "edit": True,
                "view": True
            }
        },
        "catalogPermission": True
    },
    {
        # Operator: artifact rights only.
        "space_id": "space_099",
        "type": "edit",
        "user_role_id": "user_role_098",
        "access_levels": {
            "userManagement": {
                "access_level": True
            },
            "approvalInbox": {
                "access_level": True
            },
            "artifacts": {
                "access_level": True
            },
        },
        "user_role_description": "Operator",
        "user_role_name": "Operator",
        "user_role_permissions": {
            "userManagement": {
                "key": "userManagement",
                "name": "User Management",
                "create": False,
                "delete": False,
                "edit": False,
                "view": False,
                "children": {
                    "users": {
                        "key": "users",
                        "name": "User",
                        "create": False,
                        "delete": False,
                        "edit": False,
                        "view": False
                    }
                }
            },
            "approvalInbox": {
                "key": "approvalInbox",
                "name": "Approval Inbox",
                "create": False,
                "delete": False,
                "edit": False,
                "view": False
            },
            "artifacts": {
                "key": "artifacts",
                "name": "artifacts",
                "create": True,
                "delete": True,
                "edit": True,
                "view": True
            }
        },
        "catalogPermission": True
    }
]
# Seed "CatalogAdmin" user created alongside the default roles (Admin role).
# NOTE(review): timestamps mix formats — created_on/last_logged_in look like
# epoch seconds while last_failed_attempt and expires_on are strings; confirm
# downstream parsers expect exactly these shapes.
DEFAULT_USER = {
    "encryption_salt": {"dt_0": [], "dt_1": []},
    "name": "CatalogAdmin",
    "username": "CatalogAdmin",
    "password": "",
    "email": "support@rockwellautomation.com",
    "user_type": "catalog_user",
    "phonenumber": 9581388594,
    "userrole": ["user_role_096"],
    "user_id": "user_097",
    "created_on": 1735796769,
    "created_by": "user_097",
    "product_encrypted": False,
    "failed_attempts": 0,
    "is_user_locked": False,
    "last_failed_attempt": "2021-05-13 08:56:15",
    "ilens_encrypted": False,
    "passwordReset": None,
    "tz": None,
    "expires_on": "02/12/21 09:00 30 AM",
    "disable_user": False,
    "last_logged_in": 1735796769,
    "last_failed_login": None,
    "fixed_delay": 0,
    "variable_delay": 0,
    "space_id": "space_099",
    "default_user": True,
}
# Seed space document linking the default user and Admin role.
DEFAULT_SPACE = {
    "space_id": "space_099",
    "userrole": ["user_role_096"],
    "created_by": "user_097",
    # datetime.utcnow() is deprecated since Python 3.12; derive the same naive
    # UTC ISO-8601 timestamp (with a trailing "Z") from an aware datetime.
    "updated_time": datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + "Z",
    "user_id": "user_097",
    "updated_by": "user_097",
}
\ No newline at end of file
{
"encrypt_collection_dict" : {
"user": {
"encrypt_keys": ["phonenumber", "email"],
"exclude_encryption": []}
}
}
class DatabaseNames:
    """Names of the Mongo databases this migration reads from / writes to."""

    ilens_asset_model = "ilens_asset_model"
    ilens_configuration = "ilens_configuration"
    catalog = "catalog"
    catalog_meta = "catalog_meta"
class CollectionNames:
    """Names of the Mongo collections referenced by the accessor classes."""

    tags = "tags"
    tag_groups = "tag_groups"
    tag_category = "tag_category"
    units = "units"
    unit_group = "unit_group"
    process_conf = "process_conf"
    materials = "materials"
    asset_model_details = "asset_model_details"
    asset_model_rule_engine = "asset_model_rule_engine"
    industry_category = "industry_category"
    asset_model = "asset_model"
    asset_model_mapping = "asset_model_mapping"
    artifact_meta = "artifact_meta"
    unique_id = "unique_id"
class AggregationKeys:
    """String constants for Mongo aggregation operators and field references.

    Lower-case members are either pipeline operators ("$match", "$group", ...)
    or "$"-prefixed field paths used inside pipelines; KEY_* members cover the
    resource/asset-model fields used by the asset aggregation builders.
    """

    # Pipeline stages and expression operators.
    match = "$match"
    meta = "$meta"
    unwind = "$unwind"
    data = "$data"
    date = "$date"
    group = "$group"
    push = "$push"
    sum = "$sum"
    exists = "$exists"
    cond = "$cond"
    value = "$value"
    regex = "$regex"
    remove = "$$REMOVE"
    root = "$$ROOT"
    tostring = "$toString"
    ifnull = "$ifNull"
    limit = "$limit"
    site_id = "$site_id"
    concat = "$concat"
    count = "$count"
    expr = "$expr"
    eq = "$eq"
    skip = "$skip"
    agg_and = "$and"
    replace_root = "$replaceRoot"
    literal = "$literal"
    sort = "$sort"
    first = "$first"
    options = "$options"
    # Document field references ("$field" form for expressions).
    user_role_id = "$user_role_id"
    user_role_name = "$user_role_name"
    user_id = "$user_id"
    username = "$username"
    project = "$project"
    project_id = "$project_id"
    status = "$status"
    addfields = "$addFields"
    lookup = "$lookup"
    split = "$split"
    current_status = "$current_status"
    meta_created_at = "$meta.created_at"
    meta_created_by = "$meta.created_by"
    arrayelemat = "$arrayElemAt"
    system_tag_type = "$system_tag_type"
    arraytoobject = "$arrayToObject"
    user_username = "$user.username"
    step_data = "$step_data"
    meta_createdat = "meta.created_at"
    fullpath = "$full_path"
    name = "$name"
    merge_objects = "$mergeObjects"
    version_comments = "$version_comments"
    subtract = "$subtract"
    artifact_id = "$artifact_id"
    # Resource sub-document fields; *_PLAIN variants are dotted paths for
    # $match/$sort, the others are "$"-prefixed for expression contexts.
    KEY_RESOURCES = "$resources"
    KEY_RESOURCE_RESOURCE_NAME_PLAIN = "resources.resource_name"
    KEY_RESOURCE_RESOURCE_DESC_PLAIN = "resources.resource_description"
    KEY_RESOURCE_RESOURCE_PATH = "$resources.resource_path"
    KEY_RESOURCE_CATEGORY = "$resources.category"
    KEY_RESOURCE_SUB_CATEGORY = "$resources.sub_category"
    KEY_RESOURCE_RESOURCE_NAME = "$resources.resource_name"
    KEY_RESOURCE_RESOURCE_ID = "$resources.resource_id"
    KEY_RESOURCE_RESOURCE_TYPE = "$resources.resource_type"
    KEY_ASSET_MODEL_ID = "$asset_model_id"
    KEY_ASSET_VERSION = "$asset_version"
    KEY_DROWN_SKETCH_BODY_PAGE_TYPE = "drown_sketch.body.page_type"
\ No newline at end of file
This diff is collapsed.
from scripts.config import DBConf
from scripts.utils.mongo_utils import MongoConnect

# Instantiate-and-call: MongoConnect(...)() presumably yields the underlying
# client object — confirm against MongoConnect.__call__.
source_mongo_client = MongoConnect(uri=DBConf.SOURCE_MONGO_URI)()
destination_mongo_client = MongoConnect(uri=DBConf.DESTINATION_MONGO_URI)()
from scripts.constants.db_constants import AggregationKeys
class AssetDetailAggregate:
    """Builders for Mongo aggregation pipelines over asset-model documents."""

    @staticmethod
    def asset_model_version_list(project_id, asset_model_id):
        """Pipeline selecting the longest ``version_list`` for an asset model.

        Documents carrying an ``asset_rule`` field are excluded; the final
        stage picks the version_list whose element count is the maximum.
        """
        return [
            {
                AggregationKeys.match: {
                    "project_id": project_id,
                    "asset_model_id": asset_model_id,
                    "asset_rule": {AggregationKeys.exists: False},
                }
            },
            {AggregationKeys.project: {"_id": 0, "version_list": 1}},
            {AggregationKeys.project: {"version_list": 1, "value": {"$size": "$version_list"}}},
            {
                AggregationKeys.group: {
                    "_id": None,
                    "value": {AggregationKeys.push: AggregationKeys.value},
                    "name": {AggregationKeys.push: "$version_list"},
                }
            },
            {
                AggregationKeys.project: {
                    "_id": 0,
                    # Index of the max size picks the matching version_list.
                    "version_list": {
                        AggregationKeys.arrayelemat: [
                            AggregationKeys.name,
                            {"$indexOfArray": [AggregationKeys.value, {"$max": AggregationKeys.value}]},
                        ]
                    },
                }
            },
        ]

    @staticmethod
    def fetch_resource_list(artifact_id, filters=None, filter_key=False):
        """Pipeline listing per-resource projections for the given artifact ids.

        :param artifact_id: list of artifact ids matched with ``$in``.
        :param filters: optional dict with "search" (substring, case-insensitive)
            and "sort_by" ("asc"/anything-else for descending) keys.
        :param filter_key: when True, splice the search match (and optional
            sort) stages into the pipeline.
        :return: the aggregation pipeline as a list of stages.
        """
        # BUG FIX: filter_key=True with filters=None used to raise
        # AttributeError on filters.get(); default to an empty dict.
        filters = filters or {}
        query = [
            {
                AggregationKeys.match: {
                    "artifact_id": {"$in": artifact_id},
                }
            },
            {AggregationKeys.unwind: AggregationKeys.KEY_RESOURCES},
            {AggregationKeys.project: {"_id": 0, "asset_model_id": 1, "asset_version": 1, "resources": 1}},
            {
                AggregationKeys.project: {
                    "resource_path": AggregationKeys.KEY_RESOURCE_RESOURCE_PATH,
                    "category": AggregationKeys.KEY_RESOURCE_CATEGORY,
                    "sub_category": AggregationKeys.KEY_RESOURCE_SUB_CATEGORY,
                    "resource_name": AggregationKeys.KEY_RESOURCE_RESOURCE_NAME,
                    "resource_description": "$resources.resource_description",
                    "resource_id": AggregationKeys.KEY_RESOURCE_RESOURCE_ID,
                    "resource_type": AggregationKeys.KEY_RESOURCE_RESOURCE_TYPE,
                    "is_svg": {"$ifNull": ["$resources.is_svg", False]},
                    "resource_config": "$resources.resource_config",
                    "asset_model_id": AggregationKeys.KEY_ASSET_MODEL_ID,
                    "asset_version": AggregationKeys.KEY_ASSET_VERSION,
                }
            },
        ]
        if filter_key:
            # An absent/empty search term degenerates to regex "", which
            # matches every document (same behavior as the original's
            # duplicated else-branch).
            search_term = filters.get("search") or ""
            search_query = {
                "$or": [
                    {
                        AggregationKeys.KEY_RESOURCE_RESOURCE_NAME_PLAIN: {
                            AggregationKeys.regex: search_term,
                            AggregationKeys.options: "i",
                        }
                    },
                    {
                        AggregationKeys.KEY_RESOURCE_RESOURCE_DESC_PLAIN: {
                            AggregationKeys.regex: search_term,
                            AggregationKeys.options: "i",
                        }
                    },
                ]
            }
            query.insert(3, {AggregationKeys.match: search_query})
            if filters.get("sort_by"):
                sort_value = 1 if filters["sort_by"] == "asc" else -1
                sort_query = {AggregationKeys.KEY_RESOURCE_RESOURCE_NAME_PLAIN: sort_value}
                query.insert(4, {AggregationKeys.sort: sort_query})
        return query
from pydantic import BaseModel
from scripts.db.redis_connection import destination_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
from scripts.constants.db_constants import (
CollectionNames,
DatabaseNames,
)
from scripts.logging import logger
class ArtifactMetaSchema(BaseModel):
    """Shape of a document in the catalog ``artifact_meta`` collection."""

    artifact_id: str
    name: str
    artifact_type: str
    # Version number; stored as float but formatted as "N.0" strings elsewhere.
    ver: float
    image: str
    status: str
    meta: dict
    source_details: dict
    comments: str
    space_id: str
    source_id: str
class ArtifactsMeta(MongoCollectionBaseClass):
    """Accessor for the ``catalog.artifact_meta`` collection (destination DB)."""

    def __init__(self, mongo_client, space_id=None):
        super().__init__(
            mongo_client, database=DatabaseNames.catalog, collection=CollectionNames.artifact_meta, space_db=destination_space_db
        )
        self.space_id = space_id

    @property
    def key_space_id(self):
        # Field name used to scope queries by space.
        return "space_id"

    def fetch_artifacts_count(self):
        """Count approved artifacts grouped by artifact_type.

        :return: list of {"name": artifact_type, "value": count} dicts,
            or None when the aggregation is empty or raises (error is logged).
        """
        try:
            results = self.aggregate(
                [
                    {"$match": {"status": "approved"}},
                    {
                        "$group": {
                            "_id": "$artifact_type",
                            "count": {"$sum": 1},
                        }
                    },
                    {
                        "$project": {
                            "name": "$_id",
                            "value": "$count",
                            "_id": 0,
                        }
                    },
                ]
            )
            if not results:
                return None
            return list(results)
        except Exception as e:
            # Deliberate best-effort: errors are logged and None is returned.
            logger.error(f"Error occurred in fetching artifacts due to {str(e)}")

    def get_artifact_meta_by_aggregate(self, query: list):
        """Run an arbitrary aggregation pipeline and return the results as a list."""
        return list(self.aggregate(pipelines=query))

    def fetch_artifact_by_id(self, artifact_id):
        """Return the artifact document (without _id) for the given id, if any."""
        return self.find_one({"artifact_id": artifact_id}, filter_dict={"_id": 0})

    def get_artifact_latest_version(self, artifact_name, artifact_type):
        """Compute the next version string for an artifact name/type pair.

        :return: "<int(max ver) + 1>.0" when previous versions exist, else "1.0".
        """
        artifact_older_record = self.find(
            query={
                "name": artifact_name,
                "artifact_type": artifact_type,
            },
            filter_dict={"_id": 0, "ver": 1},
            sort={"ver": -1},
            limit=1,
        )
        artifact_older_record = list(artifact_older_record)
        if artifact_older_record:
            # Bump the integer part of the highest existing version.
            return f"{int(float(artifact_older_record[0]['ver']))+ 1}.0"
        else:
            return "1.0"
from typing import Dict, List, Optional
from pydantic import BaseModel
from scripts.db.redis_connection import destination_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
from scripts.constants.db_constants import (
CollectionNames,
DatabaseNames,
)
class AssetDetailSchema(BaseModel):
    """
    This is the Schema for the Mongo DB Collection.
    All datastore and general responses will be following the schema.
    """

    asset_model_id: Optional[str] = ""
    allow_editing: Optional[bool] = True
    asset_description: Optional[str] = ""
    # NOTE(review): versions are strings ("1.0"-style) throughout this file.
    asset_version: Optional[str] = ""
    asset_model_type: Optional[str] = ""
    asset_model_icon: Optional[str] = ""
    parameters: Optional[Dict] = {}
    parameters_new: Optional[Dict] = {}
    processes: Optional[list] = []
    device_models: Optional[List] = []
    events: Optional[List] = []
    resources: Optional[Dict] = {}
    others: Optional[Dict] = {}
class AssetModelArtifacts(MongoCollectionBaseClass):
    """Accessor for the ``catalog.asset_model`` collection (destination DB)."""

    def __init__(self, mongo_client, space_id=None):
        super().__init__(
            mongo_client, database=DatabaseNames.catalog, collection=CollectionNames.asset_model, space_db=destination_space_db
        )
        # NOTE(review): the space_id argument is stored as project_id — looks
        # like a naming slip carried over from the source-side classes; confirm.
        self.project_id = space_id

    @property
    def key_asset_model_id(self):
        return "asset_model_id"

    @property
    def key_asset_version(self):
        return "asset_version"

    @property
    def key_space_id(self):
        return "space_id"

    def get_highest_asset_model_version(self, asset_model_name, space_id):
        """Return (highest asset_version, asset_model_id) for a model name.

        :return: ("0.0", "") when no document matches.
        """
        # NOTE(review): $max compares string versions lexicographically, so
        # "10.0" < "9.0" if versions are stored as strings — confirm intent.
        query = [
            {"$match": {"asset_model_name": asset_model_name, "space_id": space_id}},
            {
                "$group": {
                    "_id": "$asset_model_id",
                    "highestVersion": {"$max": "$asset_version"},
                }
            },
        ]
        res = list(self.aggregate(query))
        if res:
            return res[0].get("highestVersion", "0.0"), res[0].get("_id", "")
        else:
            return "0.0", ""

    def insert_one_asset_detail(self, data):
        """
        The following function will insert one asset in the
        asset_list collections
        :param self:
        :param data:
        :return:
        """
        insert_data = data
        return self.insert_one(insert_data)

    def aggregate_asset_detail(self, filter_list: List):
        # Returns None (implicitly) when filter_list is empty/falsy.
        if filter_list:
            return self.aggregate(pipelines=filter_list)
from typing import Dict, List, Optional
from pydantic import BaseModel
from scripts.db.redis_connection import destination_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
from scripts.constants.db_constants import (
CollectionNames,
DatabaseNames,
)
class AssetModelMappingSchema(BaseModel):
    """
    This is the Schema for the Mongo DB Collection.
    All datastore and general responses will be following the schema.
    """

    asset_model_id: Optional[str] = ""
    asset_version: Optional[str] = ""
    artifact_id: Optional[str] = ""
    parameter_details: Optional[List] = []
    process_details: Optional[List] = []
    industry_details: Optional[Dict] = {}
class AssetModelMapping(MongoCollectionBaseClass):
    """Accessor for the ``catalog.asset_model_mapping`` collection (destination DB)."""

    def __init__(self, mongo_client, space_id=None):
        super().__init__(
            mongo_client,
            database=DatabaseNames.catalog,
            collection=CollectionNames.asset_model_mapping,
            space_db=destination_space_db
        )
        # BUG FIX: a trailing comma previously made this a one-element tuple
        # (space_id,); store the plain value like the sibling accessor classes.
        self.project_id = space_id

    @property
    def key_asset_model_id(self):
        return "asset_model_id"

    @property
    def key_artifact_id(self):
        return "artifact_id"

    @property
    def key_asset_version(self):
        return "asset_version"

    @property
    def key_space_id(self):
        return "space_id"

    def find_asset_detail_by_id(
        self, space_id, artifact_id=None, asset_id=None, asset_version=None, asset_name=None, filter_dict=None
    ):
        """Find one mapping document by any combination of identifiers.

        :param space_id: mandatory space scope for the lookup.
        :param filter_dict: optional Mongo projection.
        :return: the matching document, or {} when nothing matches.
        """
        query = {self.key_space_id: space_id}
        if asset_id:
            query[self.key_asset_model_id] = asset_id
        if artifact_id:
            query[self.key_artifact_id] = artifact_id
        if asset_version:
            query[self.key_asset_version] = asset_version
        if asset_name:
            query["asset_model_name"] = asset_name
        asset_list = self.find_one(query=query, filter_dict=filter_dict)
        return asset_list if asset_list else {}

    def insert_one_asset_detail(self, data):
        """
        The following function will insert one asset in the
        asset_list collections
        :param self:
        :param data:
        :return:
        """
        insert_data = data
        return self.insert_one(insert_data)
from typing import Any, Union
from scripts.db.redis_connection import destination_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
from scripts.constants.db_constants import (
CollectionNames,
DatabaseNames,
)
from scripts.logging import logger
class AssetRuleEngineMeta(MongoCollectionBaseClass):
    """Accessor for the ``catalog.asset_model_rule_engine`` collection (destination DB)."""

    def __init__(self, mongo_client, space_id=None):
        super().__init__(
            mongo_client,
            database=DatabaseNames.catalog,
            collection=CollectionNames.asset_model_rule_engine,
            space_db=destination_space_db
        )
        # BUG FIX: a trailing comma previously made this a one-element tuple
        # (space_id,); store the plain value like the sibling accessor classes.
        self.project_id = space_id

    @property
    def key_space_id(self):
        return "space_id"

    def fetch_rule_details(self, list_of_asset_model_ids):
        """Project rule name/description/type and an Enabled/Disabled label
        for the given asset model ids.

        :return: list of projected documents ([] when none match), or None
            when the aggregation raises (error is logged).
        """
        try:
            pipeline = [
                {"$match": {"asset_model_id": {"$in": list_of_asset_model_ids}}},
                {
                    "$project": {
                        "_id": 0,
                        "ruleName": "$basic_info.ruleName",
                        "deviceDescription": "$basic_info.deviceDescription",
                        "Selected_ruleType": "$basic_info.Selected_ruleType",
                        # disable_all is stored as the string "True"/"False".
                        "disable_all": {
                            "$cond": {
                                "if": {"$eq": ["$basic_info.disable_all", "True"]},
                                "then": "Disabled",
                                "else": "Enabled",
                            }
                        },
                    }
                },
            ]
            # list() already yields [] for an empty cursor, so no extra
            # normalization is needed.
            return list(self.aggregate(pipeline))
        except Exception as e:
            logger.error(f"Error occurred in the fetch rule details due to {str(e)}")

    def find_by_space(
        self, space_id: str, projections=None, sort=None, query_dict=None, limit=None, skip=0, **filters
    ) -> Union[Any, None]:
        """Find documents scoped to a space, merging extra query criteria.

        :return: list of matching documents, or None when the find is empty.
        """
        query = {self.key_space_id: space_id}
        if query_dict:
            query |= query_dict
        if filters:
            query.update(filters)
        records = self.find(query, projections, sort=sort, limit=limit, skip=skip)
        return list(records) if records else None
from typing import Dict, List, Optional
from pydantic import BaseModel
from scripts.constants.db_constants import DatabaseNames, CollectionNames
from scripts.db.redis_connection import source_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
class AssetDetailSchema(BaseModel):
    """
    This is the Schema for the Mongo DB Collection.
    All datastore and general responses will be following the schema.

    NOTE(review): duplicated from the destination-side module of the same
    name — consider consolidating into one shared schema.
    """

    asset_model_id: Optional[str] = ""
    allow_editing: Optional[bool] = True
    asset_description: Optional[str] = ""
    asset_version: Optional[str] = ""
    asset_model_type: Optional[str] = ""
    asset_model_icon: Optional[str] = ""
    parameters: Optional[Dict] = {}
    parameters_new: Optional[Dict] = {}
    processes: Optional[list] = []
    device_models: Optional[List] = []
    events: Optional[List] = []
    resources: Optional[Dict] = {}
    others: Optional[Dict] = {}
class AssetDetail(MongoCollectionBaseClass):
    """Accessor for ``ilens_asset_model.asset_model_details`` (source DB)."""

    def __init__(self, mongo_client, project_id=None):
        super().__init__(
            mongo_client, database=DatabaseNames.ilens_asset_model, collection=CollectionNames.asset_model_details, space_db=source_space_db
        )
        # NOTE(review): the project_id argument is stored as space_id — the
        # mirror of the naming slip in the destination-side classes; confirm.
        self.space_id = project_id

    @property
    def key_project_id(self):
        return "project_id"

    @property
    def key_asset_model_id(self):
        return "asset_model_id"

    @property
    def key_asset_version(self):
        return "asset_version"

    def find_asset_detail_by_param(self, **query):
        """Find asset documents matching arbitrary keyword criteria (cursor)."""
        asset_list = self.find(query)
        return asset_list

    def find_asset_detail_by_id(self, project_id, asset_id=None, asset_version=None, asset_name=None, filter_dict=None):
        """Find one asset document by project plus optional id/version/name.

        :return: the matching document, or {} when nothing matches.
        """
        query = {}
        query.update({self.key_project_id: project_id})
        if asset_id:
            query.update({self.key_asset_model_id: asset_id})
        if asset_version:
            query.update({self.key_asset_version: asset_version})
        if asset_name:
            query.update({"asset_model_name": asset_name})
        asset_list = self.find_one(query=query, filter_dict=filter_dict)
        if asset_list:
            return asset_list
        return {}

    def find_assets(self, query):
        """Return all matching asset documents as a list ([] when none)."""
        all_assets = self.find(query=query)
        if all_assets:
            return list(all_assets)
        return []

    def get_highest_asset_model_version(self, asset_model_name, project_id):
        """Return (highest asset_version, asset_model_id) for a model name.

        :return: ("0.0", "") when no document matches.
        """
        # NOTE(review): $max on string versions compares lexicographically —
        # "10.0" < "9.0"; confirm version values are ordered as intended.
        query = [
            {"$match": {"asset_model_name": asset_model_name, "project_id": project_id}},
            {
                "$group": {
                    "_id": "$asset_model_id",
                    "highestVersion": {"$max": "$asset_version"},
                }
            },
        ]
        res = list(self.aggregate(query))
        if res:
            return res[0].get("highestVersion", "0.0"), res[0].get("_id", "")
        else:
            return "0.0", ""

    def insert_one_asset_detail(self, data):
        """
        The following function will insert one asset in the
        asset_list collections
        :param self:
        :param data:
        :return:
        """
        insert_data = data
        return self.insert_one(insert_data)

    def delete_one_asset_detail(self, asset_id, asset_version):
        """Delete one asset document by id and/or version.

        :return: the delete result, or False when both arguments are falsy.
        """
        query = {}
        if bool(asset_id):
            query.update({self.key_asset_model_id: asset_id})
        if bool(asset_version):
            query.update({self.key_asset_version: asset_version})
        if query:
            return self.delete_one(query)
        else:
            return False

    def delete_one_asset_rule(self, project_id, asset_id, asset_version, rule_engine_id):
        """Delete the asset document containing the given rule_engine_id.

        NOTE(review): delete_one removes the whole matching document, not just
        the rule entry inside ``rules`` — confirm this is the intent.
        """
        query = {
            self.key_project_id: project_id,
            self.key_asset_model_id: asset_id,
            self.key_asset_version: asset_version,
            "rules.rule_engine_id": rule_engine_id,
        }
        return self.delete_one(query)

    def update_review_status(self, asset_model_id, asset_version, action, project_id, user_id):
        """Set the review status and acting user on one asset version."""
        query = {
            self.key_asset_model_id: asset_model_id,
            self.key_asset_version: asset_version,
            self.key_project_id: project_id,
        }
        return self.update_one(query=query, data={"status": action, "action_user": user_id}, upsert=False)

    def update_asset_detail(self, asset_id, data, asset_version=None, project_id=None, upsert=False):
        """Update one asset document matched by id (and optional version/project)."""
        query = {self.key_asset_model_id: asset_id}
        if asset_version:
            query.update({self.key_asset_version: asset_version})
        if project_id:
            query.update({self.key_project_id: project_id})
        return self.update_one(query=query, data=data, upsert=upsert)

    def update_many_asset_detail(self, asset_id, data, asset_version=None, project_id=None, upsert=False):
        """Update all asset documents matched by id (and optional version/project)."""
        query = {self.key_asset_model_id: asset_id}
        if asset_version:
            query.update({self.key_asset_version: asset_version})
        if project_id:
            query.update({self.key_project_id: project_id})
        return self.update_many(query=query, data=data, upsert=upsert)

    def aggregate_asset_detail(self, filter_list: List):
        # Returns None (implicitly) when filter_list is empty/falsy.
        if filter_list:
            return self.aggregate(pipelines=filter_list)

    def match_asset_des(self, obj_req):
        """Case-insensitive substring search over asset_description.

        :return: list of {"asset_description", "asset_model_icon"} projections.
        """
        query = [
            {"$match": {"asset_description": {"$regex": obj_req, "$options": "i"}}},
            {"$project": {"asset_description": 1, "asset_model_icon": 1, "_id": 0}},
        ]
        return list(self.aggregate(pipelines=query))
from scripts.constants.db_constants import DatabaseNames, CollectionNames
from scripts.db.redis_connection import source_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
class AssetModelRuleEngine(MongoCollectionBaseClass):
    """Accessor for ``ilens_asset_model.asset_model_rule_engine`` (source DB)."""

    def __init__(self, mongo_client, project_id=None):
        super().__init__(
            mongo_client, database=DatabaseNames.ilens_asset_model, collection=CollectionNames.asset_model_rule_engine, space_db=source_space_db
        )
        self.space_id = project_id
from typing import Optional
from pydantic import BaseModel
from scripts.constants.db_constants import DatabaseNames, CollectionNames
from scripts.db.redis_connection import source_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
class IndustryCategorySchema(BaseModel):
    """
    This is the Schema for the Mongo DB Collection.
    All datastore and general responses will be following the schema.
    """

    industry_category_name: str
    description: Optional[str] = ""
    industry_category_id: Optional[str] = ""
    upload_icon: Optional[str] = ""
    # NOTE(review): typed str but queried with boolean False in
    # find_industry_list — confirm the stored type.
    is_deleted: Optional[str] = ""
class IndustryCategory(MongoCollectionBaseClass):
    """Accessor for ``ilens_asset_model.industry_category`` (source DB)."""

    def __init__(self, mongo_client):
        super().__init__(
            mongo_client, database=DatabaseNames.ilens_asset_model, collection=CollectionNames.industry_category, space_db=source_space_db
        )

    def find_one_industry_category(self, filter_dict=None, **query) -> IndustryCategorySchema:
        """
        The following function will give one industry_category for a given set of
        search parameters as keyword arguments
        :param filter_dict: optional Mongo projection
        :param query: equality criteria as keyword arguments
        :return: IndustryCategorySchema when found, else the falsy find result
        """
        industry = self.find_one(filter_dict=filter_dict, query=query)
        if industry:
            return IndustryCategorySchema(**industry)
        else:
            return industry

    def insert_one_industry_category(self, data):
        """
        The following function will insert one industry_category in the
        industry_category collections
        :param data: document to insert
        :return: insert result
        """
        return self.insert_one(data)

    def update_one_industry_category(self, data, query):
        """
        The following function will update one industry_category in
        industry_category collection based on the given query
        (upsert is always False)
        :param data: fields to set
        :param query: match criteria
        :return: update result
        """
        return self.update_one(data=data, upsert=False, query=query)

    def update_many_industry_category(self, data, query):
        """
        The following function will update many industry_category in
        industry_category collection based on the given query
        (upsert is always False)
        :param data: fields to set
        :param query: match criteria
        :return: update result
        """
        return self.update_many(data=data, upsert=False, query=query)

    def find_all_industry_category(self, sort=None, skip=0, limit=None, **query):
        """
        The following function will give all industry_category for the given set of
        search parameters as keyword arguments
        :param sort: optional sort specification
        :param skip: number of documents to skip
        :param limit: optional result cap
        :param query: equality criteria as keyword arguments
        :return: list of documents ([] when none match)
        """
        # Fixed projection of the fields the callers display.
        filter_dict = {
            "industry_category_name": 1,
            "industry_category_id": 1,
            "description": 1,
            "is_deleted": 1,
            "upload_icon": 1,
        }
        response = self.find(filter_dict=filter_dict, sort=sort, skip=skip, limit=limit, query=query)
        if not response:
            return []
        return list(response)

    def find_industry_list(self):
        """Return all non-deleted industry categories ([] when none)."""
        response = self.find({"is_deleted": False})
        if not response:
            return []
        return list(response)

    def fetch_all_industry_category(self, query):
        """
        The following function will give all industry_category for the given set of
        search parameters as keyword arguments
        :param query: match criteria
        :return: list of documents ([] when none match)
        """
        response = self.find(query=query)
        return list(response) if response else []
from scripts.db.redis_connection import source_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
from scripts.constants.db_constants import DatabaseNames, CollectionNames
class Materials(MongoCollectionBaseClass):
    """Accessor for ``ilens_configuration.materials`` (source DB)."""

    def __init__(self, mongo_client, project_id=None):
        super().__init__(mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.materials, space_db=source_space_db)
        self.space_id = project_id
from scripts.constants.db_constants import CollectionNames, DatabaseNames
from scripts.db.redis_connection import source_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
class ProcessConf(MongoCollectionBaseClass):
    """Accessor for ``ilens_configuration.process_conf`` (source DB)."""

    def __init__(self, mongo_client, project_id=None):
        super().__init__(
            mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.process_conf, space_db=source_space_db
        )
        self.space_id = project_id

    def find_all_processes(self, query=None):
        """
        The following function will give all processes for the given set of
        search parameters as keyword arguments
        :param self:
        :param query: match criteria
        :return: list of documents ([] when none match)
        """
        all_process = self.find(query=query)
        if not all_process:
            return []
        return list(all_process)

    def find_by_process_id(self, process_id):
        """
        The following function will give one process for a given set of
        search parameters as keyword arguments
        :param process_id: process identifier to match
        :return: the document, or the falsy find result
        """
        one_process = self.find_one(query={"process_id": process_id})
        return one_process

    def find_by_project_id_process_name(self, process_name, project_id):
        """
        The following function will give one process for a given
        process name within a project
        :param process_name: process name to match
        :param project_id: project scope
        :return: the document, or the falsy find result
        """
        one_process = self.find_one(query={"process_name": process_name, "project_id": project_id})
        return one_process

    def find_by_project_id(self, project_id):
        """
        The following function will give all processes for a given project
        :param project_id: matched against customer_project_id
        :return: list of documents ([] when none match)
        """
        # NOTE(review): this queries customer_project_id, unlike the sibling
        # method that queries project_id — confirm both fields exist.
        query = {"customer_project_id": project_id}
        many_process = self.find(query=query)
        if not bool(many_process):
            return []
        return list(many_process)

    def insert_one_process(self, data):
        """
        The following function will insert one process in the
        process_conf collection
        :param self:
        :param data: document to insert
        :return: insert result
        """
        return self.insert_one(data)

    def insert_many_process(self, data):
        """
        The following function will insert many processes in the
        process_conf collection
        :param self:
        :param data: documents to insert
        :return: insert result
        """
        return self.insert_many(data)

    def update_one_process(self, process_id, data, upsert=False):
        """
        The following function will update one process in the
        process_conf collection based on the given process_id
        """
        query_dict = {"process_id": process_id}
        return self.update_one(data=data, query=query_dict, upsert=upsert)

    def delete_many_process(self, query):
        """
        The following function will delete many processes in the
        process_conf collection based on the given query
        :param self:
        :param query: match criteria; must be non-empty
        :return: delete result, or False when query is empty/falsy
        """
        if bool(query):
            response = self.delete_many(query=query)
            return response
        else:
            return False

    def delete_one_process(self, process_id):
        """
        The following function will delete one process in the
        process_conf collection based on the given process_id
        :param process_id: process identifier; must be truthy
        :return: delete result, or False when process_id is falsy
        """
        if process_id:
            return self.delete_one(query={"process_id": process_id})
        else:
            return False

    def find_process_by_aggregate(self, query):
        """Run an aggregation pipeline and return results as a list ([] when empty)."""
        process = self.aggregate(query)
        if not process:
            return []
        return list(process)
from scripts.constants.db_constants import CollectionNames, DatabaseNames
from scripts.db.redis_connection import source_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
class TagCategory(MongoCollectionBaseClass):
    # Thin accessor bound to the `tag_category` collection in ilens_configuration.
    def __init__(self, mongo_client, project_id=None):
        """Attach to tag_category; *project_id* is kept as ``self.space_id``."""
        super().__init__(mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.tag_category, space_db=source_space_db)
        self.space_id = project_id
from scripts.constants.db_constants import CollectionNames, DatabaseNames
from scripts.db.redis_connection import source_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
class TagGroups(MongoCollectionBaseClass):
    # Thin accessor bound to the `tag_groups` collection in ilens_configuration.
    def __init__(self, mongo_client, project_id=None):
        """Attach to tag_groups; *project_id* is kept as ``self.space_id``."""
        super().__init__(mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.tag_groups, space_db=source_space_db)
        self.space_id = project_id
from scripts.constants.db_constants import CollectionNames, DatabaseNames
from scripts.db.redis_connection import source_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
class Tags(MongoCollectionBaseClass):
    """Accessor for the `tags` collection in ilens_configuration."""

    def __init__(self, mongo_client, project_id=None):
        super().__init__(
            mongo_client,
            database=DatabaseNames.ilens_configuration,
            collection=CollectionNames.tags,
            space_db=source_space_db,
        )
        self.space_id = project_id

    @property
    def key_tag_id(self):
        # Canonical field name holding a tag's identifier.
        return "id"

    @property
    def key_tag_name(self):
        # Canonical field name holding a tag's display name.
        return "tag_name"

    def find_name_by_id(self, tag_id: str):
        """Return the tag_name for *tag_id*, or None when no record matches."""
        record = self.find_one({self.key_tag_id: tag_id}, {self.key_tag_name: 1, "_id": 0})
        return record[self.key_tag_name] if record else None

    def find_all_tags(self, sort=None, skip=0, limit=None, **query):
        """
        Return projected tag summaries for the given keyword filters.
        :param sort: optional sort spec
        :param skip: number of records to skip
        :param limit: optional record cap
        :param query: equality filters applied to the collection
        :return: list of projected tag dicts (empty list when none)
        """
        projection = {"id": 1, "tag_name": 1, "tag_category_name": 1, "tag_category_id": 1, "description": 1, "_id": 0}
        cursor = self.find(filter_dict=projection, sort=sort, skip=skip, limit=limit, query=query)
        return list(cursor) if cursor else []

    def find_tags(self, query):
        """Return all tags matching *query* as a list (empty list when none)."""
        matches = self.find(query=query)
        return list(matches) if matches else []

    def find_tags_by_aggregate(self, query):
        """Run an aggregation pipeline; returns a list (empty when no results)."""
        results = self.aggregate(query)
        return list(results) if results else []

    def insert_one_tag(self, data):
        """
        Insert a single tag document.
        :param data:
        :return: insert result
        """
        return self.insert_one(data)

    def insert_many_tags(self, data):
        """
        Insert multiple tag documents.
        :param data:
        :return: insert result
        """
        return self.insert_many(data)
from scripts.db.redis_connection import source_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
from scripts.constants.db_constants import DatabaseNames, CollectionNames
class Units(MongoCollectionBaseClass):
    # Thin accessor bound to the `units` collection in ilens_configuration.
    def __init__(self, mongo_client, project_id=None):
        """Attach to units; *project_id* is kept as ``self.space_id``."""
        super().__init__(mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.units, space_db=source_space_db)
        self.space_id = project_id
from scripts.db.redis_connection import source_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
from scripts.constants.db_constants import DatabaseNames, CollectionNames
class UnitGroups(MongoCollectionBaseClass):
    # Thin accessor bound to the `unit_group` collection in ilens_configuration.
    def __init__(self, mongo_client, project_id=None):
        """Attach to unit_group; *project_id* is kept as ``self.space_id``."""
        super().__init__(mongo_client, database=DatabaseNames.ilens_configuration, collection=CollectionNames.unit_group, space_db=source_space_db)
        self.space_id = project_id
from ut_redis_connector import RedisConnector

from scripts.config import DBConf

# Two Redis endpoints: the migration source cluster and the destination cluster.
source_connector = RedisConnector(DBConf.SOURCE_REDIS_URI)
destination_connector = RedisConnector(DBConf.DESTINATION_REDIS_URI)
# Space-metadata DB handles (decode_responses=True -> str payloads), used by the
# space-aware database-name helpers.
destination_space_db = destination_connector.connect(db=int(DBConf.REDIS_SPACE_DB), decode_responses=True)
source_space_db = source_connector.connect(db=int(DBConf.REDIS_SPACE_DB), decode_responses=True)
import re
from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel
from ut_mongo_util import CollectionBaseClass
from scripts.utils.decryption_util import MongoDataEncryption
from scripts.utils.mongo_utils import MongoCollectionBaseClass as UtilsMongoCollection
class UserCollectionKeys:
    """Field-name constants for documents in the `user` collection."""

    KEY_LANGUAGE = "language"
    KEY_NAME = "name"
    KEY_USER_ID = "user_id"
    KEY_SPACE_ID = "space_id"
    KEY_USERNAME = "username"
    KEY_USER_ROLE = "userrole"
    KEY_EMAIL = "email"
class UserSchema(BaseModel):
    """Pydantic model for a `catalog_meta.user` document.

    All fields are optional with benign defaults so projected/partial records
    still validate. Mutable defaults ({}) are safe here because pydantic
    copies field defaults per instance.
    """

    name: Optional[str] = ""
    space_id: Optional[str] = ""
    username: Optional[str] = ""
    password: Optional[str] = ""
    email: Optional[Any] = None
    phonenumber: Optional[Any] = None
    userrole: Optional[List[str]] = None
    user_type: Optional[str] = ""
    user_id: Optional[str] = ""
    created_by: Optional[str] = ""
    encryption_salt: Optional[Dict] = {}
    passwordReset: Optional[Dict] = {}
    # Login-throttling / lockout bookkeeping
    failed_attempts: Optional[int] = 0
    is_user_locked: Optional[bool] = False
    last_failed_login: Optional[int] = 0
    last_logged_in: Optional[int] = 0
    last_failed_attempt: Optional[str] = ""
    expires_on: Optional[str] = ""
    disable_user: Optional[bool] = False
    default_user: Optional[bool] = False
    # Audit fields — presumably epoch millis; confirm against writers.
    created_on: Optional[int] = 0
    updated_by: Optional[str] = ""
    updated_on: Optional[int] = 0
    secret: Optional[str] = ""
    password_added_on: Optional[int] = 0
    default_space: Optional[str] = ""
    fixed_delay: Optional[int] = 0
    variable_delay: Optional[int] = 0
class User(CollectionBaseClass):
    """Accessor for the `catalog_meta.user` collection.

    Reads are routed through decryption helpers borrowed from
    MongoCollectionBaseClass; writes encrypt fields via MongoDataEncryption.
    """

    def __init__(self, mongo_client, space_id=None):
        super().__init__(mongo_client, database="catalog_meta", collection="user")
        self.space_id = space_id
        self.key_user_id = UserCollectionKeys.KEY_USER_ID
        self.key_space_id = UserCollectionKeys.KEY_SPACE_ID
        self.key_username = UserCollectionKeys.KEY_USERNAME
        self.key_email = UserCollectionKeys.KEY_EMAIL
        # Bind decryption-aware readers from MongoCollectionBaseClass onto this
        # instance (duck-typed reuse across the two base classes).
        self.find_decrypted = UtilsMongoCollection.find_decrypted.__get__(self, UtilsMongoCollection)
        self.get_decrypted_records = UtilsMongoCollection.get_decrypted_records.__get__(self, UtilsMongoCollection)
        self.data_encryption = MongoDataEncryption()

    def update_user(self, query, data):
        """
        Upsert the user document matching *query* with *data*.
        NOTE(review): this variant does NOT encrypt — see update_one_user.
        :param query: Mongo filter
        :param data: fields to set
        :return: update result
        """
        return self.update_one(query=query, data=data, upsert=True)

    def insert_one_user(self, data):
        """
        Encrypt *data* per the `user` collection policy and insert it.
        :param data:
        :return: insert result
        """
        data = self.data_encryption.encrypt_data(data, collection_name="user")
        return self.insert_one(data)

    def find_user(self, space_id, user_id=None, username=None, email=None, filter_dict=None):
        """
        Return one decrypted user as a UserSchema, or the falsy miss result.

        NOTE(review): *space_id* is accepted but never used in the query —
        confirm whether it should filter like find_base_user does.
        """
        query = {}
        if user_id:
            query[self.key_user_id] = user_id
        if username:
            query[self.key_username] = username
        if email:
            # BUGFIX: a case-insensitive regex assignment here was dead code —
            # it was immediately overwritten by this exact-match value — and
            # has been removed. find_base_user still matches email with a
            # case-insensitive regex; confirm which behavior is intended.
            query[self.key_email] = email
        user = self.find_decrypted(query=query, filter_dict=filter_dict)
        if user:
            return UserSchema(**user)
        return user

    @staticmethod
    def get_users_list(space_id=None):
        """Build an aggregation pipeline mapping user_id -> username (optionally per space)."""
        query_json = [
            {
                "$group": {
                    "_id": None,
                    "data": {"$push": {"k": {"$ifNull": ["$user_id", ""]}, "v": {"$ifNull": ["$username", ""]}}},
                }
            },
            {"$replaceRoot": {"newRoot": {"$arrayToObject": "$data"}}},
        ]
        if space_id:
            query_json.insert(0, {"$match": {"space_id": space_id}})
        return query_json

    def users_list_by_aggregate(self, query: list):
        """Execute an aggregation pipeline and return its cursor."""
        return self.aggregate(pipelines=query)

    def find_user_by_space_id(self, user_id, space_id):
        """Return the (user_id, space_id) document as a dict, else the falsy miss result."""
        user = self.find_one(query={self.key_user_id: user_id, self.key_space_id: space_id})
        if user:
            return dict(user)
        return user

    def get_all_users(self, filter_dict=None, sort=None, skip=0, limit=None, **query):
        """Return all (non-decrypted) users matching the keyword filters as a list."""
        users = self.find(filter_dict=filter_dict, sort=sort, skip=skip, limit=limit, query=query)
        if users:
            return list(users)
        return []

    def find_user_role_for_user_id(self, user_id, space_id):
        """Project just the `userrole` field for a (user_id, space_id) pair."""
        query = {"user_id": user_id, "space_id": space_id}
        filter_dict = {"userrole": 1, "_id": 0}
        return self.find_one(query=query, filter_dict=filter_dict)

    def find_base_user(self, space_id=None, user_id=None, username=None, email=None, filter_dict=None):
        """
        Return one decrypted user matching any of the optional filters.
        Email matches case-insensitively. Falls back to the raw record when it
        does not validate against UserSchema.
        """
        query = {}
        if space_id:
            query[self.key_space_id] = space_id
        if user_id:
            query[self.key_user_id] = user_id
        if username:
            query[self.key_username] = username
        if email:
            query[self.key_email] = re.compile(email, re.IGNORECASE)
        if not (user := self.find_decrypted(query=query, filter_dict=filter_dict)):
            return user
        try:
            return UserSchema(**user)
        except Exception:
            # Schema mismatch: hand back the raw decrypted record.
            return user

    def find_by_space(
        self,
        projections=None,
        sort=None,
        query_dict=None,
        limit=None,
        skip=0,
        **filters,
    ) -> Union[Any, None]:
        """Find users by a merged query_dict + keyword filters; returns decrypted list."""
        query = {}
        if query_dict:
            query |= query_dict
        if filters:
            query.update(filters)
        records = self.find(query, projections, sort=sort, limit=limit, skip=skip)
        if records:
            records = self.get_decrypted_records(records)
        return list(records) if records else []

    def delete_one_user(self, user_id, space_id):
        """Delete the user identified by (user_id, space_id)."""
        return self.delete_one(query={self.key_user_id: user_id, self.key_space_id: space_id})

    def update_one_user(self, query, data):
        """
        Encrypt *data* per the `user` collection policy and upsert it.
        :param query: Mongo filter
        :param data: fields to set
        :return: update result
        """
        data = self.data_encryption.encrypt_data(data, collection_name="user")
        return self.update_one(query=query, data=data, upsert=True)

    def get_data_by_aggregate(self, query_json: list):
        """Run an aggregation pipeline and decrypt the results ([] when none)."""
        if aggregate_data := list(self.aggregate(query_json)):
            return self.get_decrypted_records(aggregate_data)
        return []

    def find_by_aggregate(self, query_json: list):
        """Run an aggregation pipeline WITHOUT decryption ([] when none)."""
        if user_by_aggregate := list(self.aggregate(query_json)):
            return user_by_aggregate
        return []

    def distinct_user(self, query_key, filter_json):
        """
        Return distinct values of *query_key* for users matching *filter_json*.
        NOTE(review): filter_json is wrapped as {user_id: filter_json} — confirm
        callers pass a user_id condition here.
        """
        query = {self.key_user_id: filter_json}
        return self.distinct(query_key=query_key, filter_json=query)

    def find_user_by_param(self, **query):
        """Find users by arbitrary equality filters; returns the decrypted result or []."""
        user = self.get_decrypted_records(self.find(query))
        # `user or []` keeps the original semantics of `if not bool(user): user = []`.
        return user or []
from typing import Optional
from ut_mongo_util import CollectionBaseClass
class UserCollectionKeys:
    """Field-name constants for documents in the `user_role` collection."""

    KEY_LANGUAGE = "language"
    KEY_NAME = "name"
    KEY_USER_ID = "user_id"
    KEY_SPACE_ID = "space_id"
    KEY_USERNAME = "username"
    KEY_USER_ROLE = "user_role_name"
    KEY_EMAIL = "email"
class UserRole(CollectionBaseClass):
    """Accessor for the `catalog_meta.user_role` collection."""

    def __init__(self, mongo_client, space_id=None):
        super().__init__(mongo_client, database="catalog_meta", collection="user_role")
        self.space_id = space_id
        self.key_user_id = UserCollectionKeys.KEY_USER_ID
        self.key_space_id = UserCollectionKeys.KEY_SPACE_ID

    def update_user_role(self, query, data):
        """
        Upsert a user-role document matching *query* with *data*.
        (Docstring corrected: previously copy-pasted from rule_targets.)
        :param query:
        :param data:
        :return: update result
        """
        return self.update_one(query=query, data=data, upsert=True)

    def find_user(self, user_id):
        """Return the document whose user_id matches (as a dict), else the falsy result."""
        user = self.find_one(query={"user_id": user_id})
        if user:
            return dict(user)
        return user

    def find_user_name(self, user_id, space_id: Optional[str]):
        """
        Return the user_role_name for a role, or None when not found.

        NOTE(review): *user_id* is matched against `user_role_id` — the
        parameter name is misleading but kept for caller compatibility.
        """
        query = {"user_role_id": user_id, "space_id": space_id}
        one_user = self.find_one(filter_dict={"user_role_name": 1, "_id": 0}, query=query)
        if one_user is None:
            return one_user
        return one_user["user_role_name"]

    @staticmethod
    def get_users_list(space_id=None):
        """Build an aggregation pipeline mapping user_id -> username (optionally per space)."""
        query_json = [
            {
                "$group": {
                    "_id": None,
                    "data": {"$push": {"k": {"$ifNull": ["$user_id", ""]}, "v": {"$ifNull": ["$username", ""]}}},
                }
            },
            {"$replaceRoot": {"newRoot": {"$arrayToObject": "$data"}}},
        ]
        if space_id:
            query_json.insert(0, {"$match": {"space_id": space_id}})
        return query_json

    def users_list_by_aggregate(self, query: list):
        """Execute an aggregation pipeline and return its cursor."""
        return self.aggregate(pipelines=query)

    def find_user_by_space_id(self, user_id, space_id):
        """Return the (user_id, space_id) document as a dict, else the falsy result."""
        user = self.find_one(query={self.key_user_id: user_id, self.key_space_id: space_id})
        if user:
            return dict(user)
        return user

    def find_user_role_by_id(self, user_role_id, filter_dict=None):
        """Return the role document for *user_role_id* (optionally projected)."""
        return self.find_one(query={"user_role_id": user_role_id}, filter_dict=filter_dict)

    def find_user_role_by_aggregate(self, query):
        """Run an aggregation pipeline; returns a list (empty when no results)."""
        if role_by_aggregate := list(self.aggregate(query)):
            return role_by_aggregate
        return []
from ut_mongo_util import CollectionBaseClass, mongo_client
class UserCollectionKeys:
    """Field-name constants for documents in the `user_space` collection."""

    KEY_LANGUAGE = "language"
    KEY_NAME = "name"
    KEY_USER_ID = "user_id"
    KEY_SPACE_ID = "space_id"
    KEY_USERNAME = "username"
    KEY_USER_ROLE = "userrole"
class UserSpace(CollectionBaseClass):
    """Accessor for `catalog_meta.user_space` (per-space user membership)."""

    key_username = UserCollectionKeys.KEY_USERNAME
    key_user_id = UserCollectionKeys.KEY_USER_ID
    key_language = UserCollectionKeys.KEY_LANGUAGE
    key_name = UserCollectionKeys.KEY_NAME
    key_space_id = UserCollectionKeys.KEY_SPACE_ID

    def __init__(self):
        # Unlike the sibling accessors, this one uses the module-level
        # mongo client rather than an injected one.
        super().__init__(mongo_client, database="catalog_meta", collection="user_space")

    def fetch_user_space(self, user_id, space_id):
        """Return the membership record for (user_id, space_id), or the miss result."""
        return self.find_one(query={self.key_user_id: user_id, self.key_space_id: space_id})

    def fetch_user_space_with_details(self, user_id, space_id):
        """Return the membership record joined with its `user` document, or None."""
        pipeline = [
            {"$match": {"user_id": user_id, "space_id": space_id}},
            {"$lookup": {"from": "user", "localField": "user_id", "foreignField": "user_id", "as": "user_details"}},
            {"$unwind": {"path": "$user_details"}},
            {
                "$project": {
                    "space_id": 1,
                    "AccessLevel": 1,
                    "access_group_ids": 1,
                    "userrole": 1,
                    "user_id": 1,
                    "name": "$user_details.name",
                    "email": "$user_details.email",
                    "username": "$user_details.username",
                }
            },
        ]
        results = list(self.aggregate(pipeline))
        return results[0] if results else None

    def find_user_role_for_user_id(self, user_id, space_id):
        """Project just the `userrole` field for a (user_id, space_id) membership."""
        return self.find_one(
            query={"user_id": user_id, "space_id": space_id},
            filter_dict={"userrole": 1, "_id": 0},
        )

    def update_one_user_space(self, data, user_id, space_id):
        """Upsert the membership record for (user_id, space_id)."""
        membership_query = {self.key_user_id: user_id, "space_id": space_id}
        return self.update_one(query=membership_query, data=data, upsert=True)

    def insert_one_user(self, data):
        """
        Insert one membership document into user_space.
        :param data:
        :return: insert result
        """
        return self.insert_one(data)

    def delete_one_user_space(self, user_id, space_id):
        """Delete the membership record for (user_id, space_id)."""
        return self.delete_one(query={self.key_user_id: user_id, self.key_space_id: space_id})
from typing import Dict, Optional
from pydantic import BaseModel
from scripts.utils.mongo_utils import MongoCollectionBaseClass
class WorkSpacesSchema(BaseModel):
    """
    This is the Schema for the Mongo DB Collection.
    All datastore and general responses will be following the schema.
    """

    space_id: Optional[str] = ""
    space_name: Optional[str] = ""
    space_type: Optional[str] = ""
    meta: Optional[Dict] = {}
    user_id: Optional[str] = ""
    # Source connection/prefix metadata consumed by the space-aware DB helpers.
    source_meta: Optional[Dict] = {}
    access_token: Optional[str] = ""
    catalog_url: Optional[str] = ""
class WorkSpaces(MongoCollectionBaseClass):
    """Accessor for the `catalog_meta.workspaces` collection."""

    def __init__(self, mongo_client):
        super().__init__(mongo_client, database="catalog_meta", collection="workspaces")

    @property
    def key_space_id(self):
        # Canonical field name for a workspace's identifier.
        return "space_id"

    @property
    def key_space_name(self):
        # Canonical field name for a workspace's display name.
        return "space_name"

    def find_space(self, space_id=None, space_name=None, filter_dict=None):
        """
        Return one workspace (as a schema dict) matching id and/or name.
        :param space_id:
        :param space_name:
        :param filter_dict: optional projection
        :return: schema dict, or {} when nothing matches
        """
        query = {}
        if space_id:
            query[self.key_space_id] = space_id
        if space_name:
            query[self.key_space_name] = space_name
        record = self.find_one(query=query, filter_dict=filter_dict)
        return WorkSpacesSchema(**record).dict() if record else {}

    def find_space_by_query(self, query, filter_dict=None):
        """Return the raw find() result when truthy, else an empty list."""
        result = self.find(query=query, filter_dict=filter_dict)
        return result if result else []

    def fetch_space_details(self):
        """Return a {space_id: space_name} mapping for every workspace ({} when none)."""
        projection = {self.key_space_id: 1, "_id": 0, self.key_space_name: 1}
        records = self.find(query={}, filter_dict=projection)
        if not records:
            return {}
        return {item.get(self.key_space_id): item.get(self.key_space_name) for item in records}

    def insert_one_space(self, data):
        """
        Insert one workspace document.
        :param data:
        :return: insert result
        """
        return self.insert_one(data)

    def delete_one_space(self, space_id):
        """Delete the workspace with *space_id*; False when no id is given."""
        if not space_id:
            return False
        return self.delete_one({self.key_space_id: space_id})

    def get_space_data_by_aggregate(self, query: list):
        """Run an aggregation pipeline and materialize the results as a list."""
        return list(self.aggregate(pipelines=query))

    def update_one_space(self, data, space_id, upsert=False):
        """
        Update (optionally upsert) the workspace identified by *space_id*.
        :param data:
        :param upsert:
        :param space_id:
        :return: update result
        """
        return self.update_one(data=data, upsert=upsert, query={"space_id": space_id})

    def delete_workspaces(self, space_id_list):
        """Delete every workspace whose ID is in *space_id_list*; returns the deleted count."""
        removal = self.delete_many({self.key_space_id: {"$in": space_id_list}})
        return removal.deleted_count
class ILensErrors(Exception):
    """Base error class for iLens exceptions that carry a message.

    (The original "Base Error Class" string sat after __init__ as a no-op
    expression; it is now the proper class docstring.)
    """

    def __init__(self, msg):
        # super() replaces the old direct Exception.__init__ call;
        # behavior (message storage, str()) is unchanged.
        super().__init__(msg)
class ILensErrorsWithoutMessage(Exception):
    """Generic iLens Error"""


class ErrorMessages:
    """User-facing error strings referenced by auth and workspace flows."""

    ERROR001 = "Authentication Failed. Please verify token"
    ERROR002 = "Signature Expired"
    ERROR003 = "Signature Not Valid"
    ERROR004 = "User Record Not Found"
    WORKSPACE_NAME_EXIST_ERROR = "Workspace Name Exist. Please Use different name"
    WORKSPACE_CATALOG_URL_ERROR = "Please add valid catalog url"
class JobCreationError(Exception):
    """
    Raised when a Job Creation throws an exception.
    Job Creation happens by adding a record to Mongo.
    """


class UnknownError(Exception):
    """Raised for failures that fit no more specific category."""

    pass


class DuplicateSpaceNameError(Exception):
    """Raised when a space name already exists."""

    pass
class KairosDBError(Exception):
    """Raised for KairosDB failures."""

    pass


class UnauthorizedError(Exception):
    """Raised when the caller is not authorized for the operation."""

    pass


class ImageValidation(Exception):
    """Raised when image validation fails."""

    pass


class ILensError(Exception):
    """Base class for generic iLens errors raised without a stored message."""

    pass


class NameExists(Exception):
    """Raised when a name collides with an existing record."""

    pass


class InputRequestError(ILensError):
    """Raised for invalid input requests."""

    pass


class IllegalTimeSelectionError(ILensError):
    """Raised for illegal time selections."""

    pass


class DataNotFound(Exception):
    """Raised when requested data does not exist."""

    pass


class AuthenticationError(ILensError):
    """
    JWT Authentication Error
    """


class JWTDecodingError(Exception):
    """Raised when a JWT cannot be decoded."""

    pass


class DuplicateReportNameError(Exception):
    """Raised when a report name already exists."""

    pass


class PathNotExistsException(Exception):
    """Raised when a path does not exist."""

    pass


class ImplementationError(Exception):
    """Raised for implementation-level failures."""

    pass


class UserRoleNotFoundException(Exception):
    """Raised when a user role cannot be found."""

    pass


class CustomError(Exception):
    """General-purpose application error."""

    pass


class IllegalToken(ILensErrors):
    """Raised for illegal tokens (carries a message via ILensErrors)."""

    pass


class InvalidPasswordError(ILensErrors):
    """Raised when password validation fails."""

    pass


class UserNotFound(ILensErrors):
    """Raised when a user record cannot be located."""

    pass


class TooManyRequestsError(Exception):
    """Raised when too many requests are made."""

    pass


class FixedDelayError(ILensErrors):
    """Raised for fixed-delay login throttling."""

    pass


class VariableDelayError(ILensErrors):
    """Raised for variable-delay login throttling."""

    pass


class LicenceValidationError(Exception):
    """Raised when licence validation fails."""

    pass
class CustomAppError:
    """Message constants for custom-app operations (note: not an Exception subclass)."""

    FAILED_TO_SAVE = "Failed to save app"


class WorkspaceNameExistError(ILensErrorsWithoutMessage):
    """Raised when a workspace name is already taken."""

    pass
class GlobalCatalogError(Exception):
    """Generic GlobalcatalogErrors Error — base error class.

    (The stray "Base Error Class" string that followed __init__ was a no-op
    expression; it has been folded into this docstring.)
    """

    def __init__(self, msg):
        # super() replaces the old direct Exception.__init__ call;
        # message storage and str() behavior are unchanged.
        super().__init__(msg)
class ILensException(Exception):
    """Root of the Mongo-utility exception hierarchy."""

    pass


class MongoException(ILensException):
    """Base class for Mongo-related failures."""

    pass


class MongoConnectionException(MongoException):
    """Connection-level Mongo failure."""

    pass


class MongoQueryException(MongoException):
    """Base class for query-level Mongo failures."""

    pass


class MongoEncryptionException(MongoException):
    """Base class for field encryption/decryption failures."""

    pass


class MongoRecordInsertionException(MongoQueryException):
    """Insert operation failure."""

    pass


class MongoFindException(MongoQueryException):
    """Find operation failure."""

    pass


class MongoDeleteException(MongoQueryException):
    """Delete operation failure."""

    pass


class MongoUpdateException(MongoQueryException):
    """Update operation failure."""

    pass


class MongoUnknownDatatypeException(MongoEncryptionException):
    """Encryption encountered a datatype it cannot handle."""

    pass


class MongoDistictQueryException(MongoException):
    """Distinct query failure (class name keeps the original "Distict" typo for compatibility)."""

    pass


class MongoFindAndReplaceException(MongoException):
    """Find-and-replace operation failure."""

    pass


class MongoObjectDeserializationException(MongoException):
    """Failure deserializing a Mongo document."""

    pass
import logging
import os
from logging import StreamHandler
from logging.handlers import RotatingFileHandler, SocketHandler
import yaml
from scripts.config import LoggVariables
# this method is to read the configuration from backup.conf
def read_configuration(file_name):
    """Parse a YAML configuration file.

    :param file_name: path to the YAML file
    :return: the loaded configuration, or None when parsing fails
        (a failure message is printed). An unreadable/missing file still
        raises OSError, matching the original behavior (open is outside try).
    """
    with open(file_name) as stream:
        try:
            return yaml.safe_load(stream)
        except Exception as err:
            print(f"Failed to load Configuration. Error: {err}")
# Load logger settings once at import time, then override the configured level
# with the environment-driven value from LoggVariables.
# NOTE(review): read_configuration returns None on parse failure, which makes
# config["logger"] raise TypeError here — confirm fail-fast is intended.
config = read_configuration("scripts/logging/logger_conf.yml")
logging_config = config["logger"]
logging_config["level"] = LoggVariables.LOG_LEVEL
def add_logging_level(level_name, level_num, method_name=None):
    """
    Comprehensively adds a new logging level to the `logging` module and the
    currently configured logging class.

    `level_name` becomes an attribute of the `logging` module with the value
    `level_num`. `method_name` becomes a convenience method for both `logging`
    itself and the class returned by `logging.getLoggerClass()` (usually just
    `logging.Logger`). If `method_name` is not specified, `level_name.lower()`
    is used.

    To avoid accidental clobbering of existing attributes, this method will
    raise an `AttributeError` if the level name is already an attribute of the
    `logging` module or if the method name is already present.

    Example
    -------
    > add_logging_level('TRACE', logging.DEBUG - 5)
    > logging.getLogger(__name__).setLevel("TRACE")
    > logging.getLogger(__name__).trace('that worked')
    > logging.trace('so did this')
    > logging.TRACE
    """
    if not method_name:
        method_name = level_name.lower()
    # f-strings replace the older str.format calls, matching the rest of the file.
    if hasattr(logging, level_name):
        raise AttributeError(f"{level_name} already defined in logging module")
    if hasattr(logging, method_name):
        raise AttributeError(f"{method_name} already defined in logging module")
    if hasattr(logging.getLoggerClass(), method_name):
        raise AttributeError(f"{method_name} already defined in logger class")

    def log_for_level(self, message, *args, **kwargs):
        # Mirror Logger.debug/info: only emit when the level is enabled.
        if self.isEnabledFor(level_num):
            self._log(level_num, message, args, **kwargs)

    def log_to_root(message, *args, **kwargs):
        logging.log(level_num, message, *args, **kwargs)

    logging.addLevelName(level_num, level_name)
    setattr(logging, level_name, level_num)
    setattr(logging.getLoggerClass(), method_name, log_for_level)
    setattr(logging, method_name, log_to_root)
def get_logger():
    """
    Creates the "ilens" rotating/socket/console logger from logger_conf.yml.

    Handlers come from `logging_config["handlers"]`; file and console handlers
    are additionally gated by LoggVariables.ENABLE_FILE_LOG /
    ENABLE_CONSOLE_LOG. Unrecognized (or disabled) handler entries are skipped.
    """
    __logger__ = logging.getLogger("ilens")
    # Custom QTRACE level below DEBUG for very chatty tracing.
    add_logging_level("QTRACE", logging.DEBUG - 5)
    __logger__.setLevel(logging_config["level"].upper())
    log_formatter = "%(asctime)s - %(levelname)-6s - [%(threadName)5s:%(funcName)5s():" + "%(lineno)s] - %(message)s"
    time_format = "%Y-%m-%d %H:%M:%S"
    file_path = LoggVariables.LOGS_MODULE_PATH
    formatter = logging.Formatter(log_formatter, time_format)
    for each_handler in logging_config["handlers"]:
        # `==` replaces the single-element `in [...]` membership tests.
        if each_handler["type"] == "RotatingFileHandler" and LoggVariables.ENABLE_FILE_LOG:
            # exist_ok avoids the exists()/makedirs() race on concurrent startups.
            os.makedirs(file_path, exist_ok=True)
            log_file = os.path.join(file_path, f"{logging_config['name']}.log")
            temp_handler = RotatingFileHandler(
                log_file, maxBytes=each_handler["max_bytes"], backupCount=each_handler["back_up_count"]
            )
            temp_handler.setFormatter(formatter)
        elif each_handler["type"] == "SocketHandler":
            # No formatter set here, matching the original (SocketHandler
            # serializes the raw record).
            temp_handler = SocketHandler(each_handler["host"], each_handler["port"])
        elif each_handler["type"] == "StreamHandler" and LoggVariables.ENABLE_CONSOLE_LOG:
            temp_handler = StreamHandler()
            temp_handler.setFormatter(formatter)
        else:
            temp_handler = None
        if temp_handler:
            __logger__.addHandler(temp_handler)
    return __logger__


# Shared module-level logger instance.
logger = get_logger()
# Logger configuration consumed by scripts/logging (read_configuration).
logger:
  name: catalog_default_meta
  level: INFO  # overridden at runtime by LoggVariables.LOG_LEVEL
  handlers:
    - type: RotatingFileHandler
      file_path: data/catalog_default_meta/logs/
      max_bytes: 100000000
      back_up_count: 5
    - type: SocketHandler
      host: localhost
      port: 23582
    - type: StreamHandler
      # NOTE(review): indentation was lost in the diff — `name` is assumed to
      # belong to the StreamHandler entry; confirm against the original file.
      name: catalog_default_meta
import time
from scripts.db.mongo import destination_mongo_client
from scripts.db.redis_connection import destination_space_db
from scripts.utils.mongo_utils import MongoCollectionBaseClass
from scripts.constants.db_constants import (
CollectionNames,
DatabaseNames,
)
from scripts.logging import logger
class CommonUtils:
    """Shared helpers: audit metadata and monotonically increasing IDs."""

    def __init__(self, space_id=None):
        # Counter collection backing get_next_id, scoped to the given space.
        self.unique_id = MongoCollectionBaseClass(
            mongo_client=destination_mongo_client,
            database=DatabaseNames.catalog_meta,
            collection=CollectionNames.unique_id,
            space_db=destination_space_db,
        )
        self.unique_id.space_id = space_id

    @staticmethod
    def get_user_meta(user_id=None, check_flag=False):
        """Build created/updated audit fields (timestamps in epoch millis).

        created_* fields are included only when *check_flag* is True
        (i.e. on first insert); updated_* fields are always present.
        """
        meta = {}
        if check_flag:
            meta["created_by"] = user_id
            meta["created_on"] = int(time.time() * 1000)
        meta["updated_by"] = user_id
        meta["updated_on"] = int(time.time() * 1000)
        return meta

    def get_next_id(self, param):
        """Return the next integer ID for *param* (seeded at 100); None on failure."""
        try:
            existing = self.unique_id.find_one(query={"key": param})
            if not existing:
                seed = {"key": param, "id": 100}
                self.unique_id.insert_one(data=seed)
                return seed["id"]
            next_value = int(existing["id"]) + 1
            self.unique_id.update_one(query={"key": param}, data={"id": next_value}, upsert=True)
            return next_value
        except Exception as err:
            logger.exception(f"Exception in creating ID: {err}")
            return None
\ No newline at end of file
import json
from functools import lru_cache
@lru_cache()
def get_db_name(redis_client, space_id: str, database: str, delimiter="__"):
    """Resolve the effective database name for a space.

    Looks the space up in Redis; when the stored metadata enables
    `add_prefix_to_database`, the configured prefix (or the space_id itself)
    is prepended with *delimiter*. Results are memoized per argument tuple.

    :raises ValueError: when *space_id* is non-empty but unknown to Redis.
    """
    if not space_id:
        return database
    raw = redis_client.get(space_id)
    if raw is None:
        raise ValueError(f"Unknown Space, Space ID: {space_id} Not Found!!!")
    meta = json.loads(raw)
    if not meta:
        return database
    source_meta = meta.get("source_meta", {})
    if source_meta.get("add_prefix_to_database"):
        prefix = source_meta.get("prefix") or space_id
        return f"{prefix}{delimiter}{database}"
    return database
@lru_cache()
def get_redis_db_prefix(redis_client, space_id: str, delimiter="__"):
    """Return the Redis key prefix (ending in *delimiter*) for a space.

    Returns False — rather than raising — whenever the space is missing,
    its metadata is empty, or prefixing is disabled. Memoized per arguments.
    """
    if not space_id:
        return False
    raw = redis_client.get(space_id)
    if raw is None:
        return False
    meta = json.loads(raw)
    if not meta:
        return False
    source_meta = meta.get("source_meta", {})
    if source_meta.get("add_prefix_to_database"):
        prefix = source_meta.get("prefix") or space_id
        return f"{prefix}{delimiter}"
    return False
def get_space_data_from_redis(redis_client, space_id: str):
    """Return the parsed space metadata stored in Redis for *space_id*.

    :raises ValueError: when the space key is absent.
    NOTE: implicitly returns None when the stored payload parses to something
    falsy (e.g. "{}"), matching the original walrus-without-else behavior.
    """
    raw = redis_client.get(space_id)
    if raw is None:
        raise ValueError(f"Unknown Space, Space ID: {space_id} Not Found!!!")
    data = json.loads(raw)
    if data:
        return data
This diff is collapsed.
import jwt
from jwt.exceptions import (
ExpiredSignatureError,
InvalidSignatureError,
MissingRequiredClaimError,
)
from scripts.config import KeyPath
from scripts.errors import AuthenticationError, ErrorMessages
from scripts.logging import logger
class Secrets:
    """Static authentication/JWT constants.

    SECURITY NOTE(review): tokens, keys, and signing secrets are hard-coded in
    source control here — they should be moved to environment variables or a
    secret manager, and the exposed values rotated.
    """

    LOCK_OUT_TIME_MINS = 30
    # NOTE(review): passed to jwt.decode as `leeway`, which PyJWT interprets in
    # SECONDS — the "mins" suffix may be misleading; confirm intent.
    leeway_in_mins = 10
    unique_key = "45c37939-0f75"
    token = "8674cd1d-2578-4a62-8ab7-d3ee5f9a"
    issuer = "ilens"
    alg = "RS256"
    SECRET_FOR_SUPPORT_LENS = "WeSupport24X7UnifyTwinX#"
    ISS = "unifytwin"
    AUD = "supportlens"
    signature_key = "kliLensKLiLensKL"
    signature_key_alg = ["HS256"]
class JWT:
    """RS256 JWT encode/decode/validate using key files from KeyPath."""

    def __init__(self):
        self.max_login_age = Secrets.LOCK_OUT_TIME_MINS
        self.issuer = Secrets.issuer
        self.alg = Secrets.alg
        self.public = KeyPath.PUBLIC
        self.private = KeyPath.PRIVATE

    def encode(self, payload):
        """Sign *payload* with the private key; logs and re-raises on failure."""
        try:
            # BUGFIX: the old trailing `finally: f.close()` was redundant with
            # `with`, and raised UnboundLocalError (masking the real error)
            # whenever open() itself failed.
            with open(self.private) as f:
                key = f.read()
            return jwt.encode(payload, key, algorithm=self.alg)
        except Exception as e:
            logger.exception(f"Exception while encoding JWT: {str(e)}")
            raise

    def decode(self, token):
        """Decode *token* with the public key; logs and re-raises on failure."""
        try:
            with open(self.public) as f:
                key = f.read()
            # NOTE(review): `algorithms` is a plain string; PyJWT expects a
            # list and this only works via substring membership — confirm.
            return jwt.decode(token, key, algorithms=self.alg)
        except Exception as e:
            # BUGFIX: log message previously said "encoding" in the decode path.
            logger.exception(f"Exception while decoding JWT: {str(e)}")
            raise

    def validate(self, token):
        """Decode and verify required claims; maps JWT errors to AuthenticationError."""
        try:
            with open(self.public) as f:
                key = f.read()
            return jwt.decode(
                token,
                key,
                algorithms=self.alg,
                leeway=Secrets.leeway_in_mins,
                options={"require": ["exp", "iss"]},
            )
        except InvalidSignatureError:
            raise AuthenticationError(ErrorMessages.ERROR003)
        except ExpiredSignatureError:
            raise AuthenticationError(ErrorMessages.ERROR002)
        except MissingRequiredClaimError:
            # NOTE(review): a missing claim maps to the "Signature Expired"
            # message (ERROR002) — possibly intended to be a distinct error.
            raise AuthenticationError(ErrorMessages.ERROR002)
        except Exception as e:
            logger.exception(f"Exception while validating JWT: {str(e)}")
            raise
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment