Commit f8f8240a authored by tarun.madamanchi's avatar tarun.madamanchi

first commit

parents
# Mongo
MONGO_URI=mongodb://dev-mongo-readwrite:dev-mongo-readwrite%40123@192.168.0.220:31589/?directConnection=true
# POSTGRES
POSTGRES_URI=postgresql://admin:UtAdm%23Post271486@192.168.0.220:31514
\ No newline at end of file
database,collection,name,key,unique,sparse,expireAfterSeconds,partialFilterExpression,collation
ilens_assistant,task_instance_data,task_id_1_stage_id_1,"[('task_id', 1), ('stage_id', 1)]",False,False,,None,None
ilens_assistant,task_instances,task_id_1_logbook_id_1,"[('task_id', 1), ('logbook_id', 1)]",False,False,,None,None
ilens_assistant,periodic_data,date_1_step_id_1,"[('date', 1), ('step_id', 1)]",False,False,,None,None
ilens_assistant,logbook,logbook_id_1,"[('logbook_id', 1)]",False,False,,None,None
ilens_assistant,workflows,workflow_id_1,"[('workflow_id', 1)]",False,False,,None,None
ilens_configuration,user,user_id_1_project_id_1,"[('user_id', 1), ('project_id', 1)]",False,False,,None,None
ilens_configuration,ext_access_tokens,expires_in_1,"[('expires_in', 1)]",False,False,0,None,None
ilens_configuration,tags,tag_id_1,"[('tag_id', 1)]",False,False,,None,None
ilens_configuration,tags,id_1,"[('id', 1)]",False,False,,None,None
ilens_configuration,scheduled_jobs,next_run_time_1,"[('next_run_time', 1)]",False,True,,None,None
ilens_configuration,accessible_hierarchy,project_id_1_user_id_1,"[('project_id', 1), ('user_id', 1)]",False,False,,None,None
ilens_reports,scheduled_jobs_energy_reports,next_run_time_1,"[('next_run_time', 1)]",False,True,,None,None
ilens_schedule_rule,scheduled_jobs,next_run_time_1,"[('next_run_time', 1)]",False,True,,None,None
ilens_widget,ilens_annotations,hierarchy_1_ts_1,"[('hierarchy', 1), ('ts', 1)]",False,False,,None,None
project_101__ilens_asset_model,asset_model_rule_engine,rule_engine_id_1,"[('rule_engine_id', 1)]",False,False,,None,None
project_101__ilens_asset_model,asset_model_rule_engine,project_id_1,"[('project_id', 1)]",False,False,,None,None
project_101__ilens_asset_model,asset_model_rule_engine,type_1,"[('type', 1)]",False,False,,None,None
project_101__ilens_asset_model,asset_model_details,asset_model_id_1,"[('asset_model_id', 1)]",False,False,,None,None
project_101__ilens_asset_model,asset_model_details,asset_model_id_1_asset_version_1,"[('asset_model_id', 1), ('asset_version', 1)]",False,False,,None,None
project_101__ilens_assistant,task_instances_2024015,task_id_1_logbook_id_1,"[('task_id', 1), ('logbook_id', 1)]",False,False,,None,None
project_101__ilens_assistant,task_instances_2024015,task_description_text,"[('_fts', 'text'), ('_ftsx', 1)]",False,False,,None,None
project_101__ilens_assistant,task_instances,task_category_1_meta.created_at_1_task_description_1,"[('task_category', 1), ('meta.created_at', 1), ('task_description', 1)]",True,False,,"SON([('task_category', 'Production Down Time')])",None
project_101__ilens_assistant,form_props,step_id_text,"[('_fts', 'text'), ('_ftsx', 1)]",False,False,,None,None
project_101__ilens_assistant,workflows,workflow_id_1,"[('workflow_id', 1)]",False,False,,None,None
project_101__ilens_assistant,workflow_permissions,workflow_id_text_step_id_text_user_role_text_permissions_text,"[('_fts', 'text'), ('_ftsx', 1)]",False,False,,None,None
project_101__ilens_assistant,Copy_of_steps_bkp_v2,step_id_-1_step_name_-1_step_category_-1,"[('step_id', -1), ('step_name', -1), ('step_category', -1)]",False,False,,None,None
project_101__ilens_assistant,Copy_of_steps_bkp,step_id_-1_step_name_-1_step_category_-1,"[('step_id', -1), ('step_name', -1), ('step_category', -1)]",False,False,,None,None
project_101__ilens_assistant,task_instance_data,task_id_1_stage_id_1,"[('task_id', 1), ('stage_id', 1)]",False,False,,None,None
project_101__ilens_assistant,task_instance_data,stage_id_-1,"[('stage_id', -1)]",False,False,,None,None
project_101__ilens_assistant,steps,step_id_-1_step_name_-1_step_category_-1,"[('step_id', -1), ('step_name', -1), ('step_category', -1)]",False,False,,None,None
project_101__ilens_assistant,steps,project_id_1,"[('project_id', 1)]",False,False,,None,None
project_101__ilens_assistant,steps,step_version_-1_meta.created_at_-1,"[('step_version', -1), ('meta.created_at', -1)]",False,False,,None,None
project_101__ilens_assistant,steps,step_id_1,"[('step_id', 1)]",False,False,,None,None
project_101__ilens_assistant,steps,project_id_1_step_version_-1_meta.created_at_-1_step_id_1,"[('project_id', 1), ('step_version', -1), ('meta.created_at', -1), ('step_id', 1)]",False,False,,None,None
project_101__ilens_assistant,task_info,task_info_id_-1,"[('task_info_id', -1)]",True,False,,None,None
project_101__ilens_assistant,logbook,logbook_id_1,"[('logbook_id', 1)]",False,False,,None,None
project_101__ilens_assistant,periodic_data,date_1_step_id_1,"[('date', 1), ('step_id', 1)]",False,False,,None,None
project_101__ilens_configuration,ilens_devices,ilens_device_id_1,"[('ilens_device_id', 1)]",False,False,,None,None
project_101__ilens_configuration,ilens_devices,physical_device_group_id_1,"[('physical_device_group_id', 1)]",False,False,,None,None
project_101__ilens_configuration,tags,tag_id_1,"[('tag_id', 1)]",False,False,,None,None
project_101__ilens_configuration,tags,id_1,"[('id', 1)]",False,False,,None,None
project_101__ilens_configuration,units,id_1,"[('id', 1)]",True,False,,None,None
project_101__ilens_configuration,rule_engine,processOn_1,"[('processOn', 1)]",False,False,,None,None
project_101__ilens_configuration,rule_engine,rule_engine_id_1_project_id_1,"[('rule_engine_id', 1), ('project_id', 1)]",False,False,,None,None
project_101__ilens_configuration,rule_engine,last_updated_1,"[('last_updated', 1)]",False,False,,None,None
project_101__ilens_configuration,tag_hierarchy,id_1,"[('id', 1)]",False,False,,None,None
project_101__ilens_configuration,tag_hierarchy,parameter_id_1,"[('parameter_id', 1)]",False,False,,None,None
project_101__ilens_configuration,tag_hierarchy,site_id_1,"[('site_id', 1)]",False,False,,None,None
project_101__ilens_configuration,tag_hierarchy,project_id_1,"[('project_id', 1)]",False,False,,None,None
project_101__ilens_configuration,channel_pp_device,device_model_id_1,"[('device_model_id', 1)]",False,False,,None,None
project_101__ilens_configuration,channel_pp_device,site_id_1,"[('site_id', 1)]",False,False,,None,None
project_101__ilens_configuration,channel_pp_device,pipeline_device_id_1,"[('pipeline_device_id', 1)]",False,False,,None,None
project_101__ilens_configuration,data_upload_history,project_upload,"[('project_id', -1), ('upload_id', -1)]",False,False,,None,None
project_101__ilens_configuration,data_upload_history,file_id_-1,"[('file_id', -1)]",False,False,,None,None
project_101__ilens_configuration,assets,hierarchy_1,"[('hierarchy', 1)]",False,False,,None,None
project_101__ilens_configuration,assets,node_id_1,"[('node_id', 1)]",False,False,,None,None
project_101__ilens_configuration,rule_template,rule_template_id_1,"[('rule_template_id', 1)]",False,False,,None,None
project_101__ilens_configuration,rule_template,Selected_ruleType_1,"[('Selected_ruleType', 1)]",False,False,,None,None
project_101__ilens_configuration,rule_template,category_1,"[('category', 1)]",False,False,,None,None
project_101__ilens_configuration,hierarchy_details,node_id_1,"[('node_id', 1)]",False,False,,None,None
project_101__ilens_configuration,hierarchy_details,parent_id_1,"[('parent_id', 1)]",False,False,,None,None
project_101__ilens_configuration,hierarchy_details,type_1,"[('type', 1)]",False,False,,None,None
project_101__ilens_configuration,hierarchy_details,project_id_1,"[('project_id', 1)]",False,False,,None,None
project_101__ilens_configuration,events_collection,ilens_device_id_-1_event_id_-1,"[('ilens_device_id', -1), ('event_id', -1)]",False,False,,None,None
project_101__ilens_configuration,physical_device_group,physical_device_group_id_1,"[('physical_device_group_id', 1)]",False,False,,None,None
project_101__ilens_configuration,lookup_table,lookup_name_1,"[('lookup_name', 1)]",False,False,,None,None
project_101__ilens_configuration,lookup_table,project_id_1,"[('project_id', 1)]",False,False,,None,None
project_101__ilens_widget,dashboard,dashboard_id_1,"[('dashboard_id', 1)]",True,False,,None,None
project_101__ilens_widget,dashboard,category_1,"[('category', 1)]",False,False,,None,None
project_101__ilens_widget,ilens_annotations,hierarchy_1_ts_1,"[('hierarchy', 1), ('ts', 1)]",False,False,,None,None
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
import uvicorn
if __name__ == '__main__':
uvicorn.run('main:app')
\ No newline at end of file
[MONGO_DB]
MONGO_URI=$MONGO_URI
[POSTGRES_DB]
POSTGRES_URI=$POSTGRES_URI
import json
import csv
import logging
from pymongo import MongoClient
def setup_logging():
    """Configure root logging: INFO level, timestamped messages to stderr."""
    stream_handler = logging.StreamHandler()
    logging.basicConfig(
        format='%(asctime)s | %(levelname)s | %(message)s',
        level=logging.INFO,
        handlers=[stream_handler],
    )
def connect_to_mongo(uri):
    """Connect to MongoDB at *uri* and verify the link with a ping.

    Args:
        uri: MongoDB connection string.

    Returns:
        The connected MongoClient.

    Raises:
        SystemExit: with status 1 if connecting or pinging fails.
    """
    try:
        logging.info("Connecting to MongoDB...")
        client = MongoClient(uri)
        # 'ping' forces an immediate round-trip so a bad URI/credentials
        # fail here rather than on first real use.
        client.admin.command('ping')
        logging.info("Connected to MongoDB.")
        return client
    except Exception as e:
        logging.error(f"Connection failed: {e}")
        # Fix: raise SystemExit instead of calling the site-module exit()
        # builtin, which is not guaranteed to exist (e.g. under python -S
        # or in embedded interpreters).
        raise SystemExit(1)
def get_user_defined_indexes(client):
    """Collect every non-default index from all user databases on *client*.

    Skips MongoDB's internal 'admin', 'local' and 'config' databases and the
    automatic '_id_' index present on every collection.

    Args:
        client: a connected MongoClient (or any object exposing the same
            list_database_names / __getitem__ interface).

    Returns:
        tuple: (flat list of per-index dicts suitable for CSV rows,
                nested {db: {collection: [index dicts]}} mapping for JSON).

    Raises:
        SystemExit: with status 1 if the database list cannot be fetched.
    """
    result = []
    json_output = {}
    try:
        databases = client.list_database_names()
    except Exception as e:
        logging.error(f"Failed to list databases: {e}")
        # Fix: raise SystemExit instead of the site-only exit() builtin.
        raise SystemExit(1)
    for db_name in databases:
        # Internal housekeeping databases carry no user-defined indexes.
        if db_name in ("admin", "local", "config"):
            continue
        db = client[db_name]
        json_output[db_name] = {}
        try:
            collections = db.list_collection_names()
        except Exception as e:
            logging.error(f"Failed to list collections in '{db_name}': {e}")
            continue
        for collection_name in collections:
            collection = db[collection_name]
            try:
                indexes = collection.index_information()
                user_indexes = []
                for index_name, index_data in indexes.items():
                    # '_id_' is created automatically on every collection.
                    if index_name == "_id_":
                        continue
                    index_info = {
                        "database": db_name,
                        "collection": collection_name,
                        "name": index_name,
                        # key/partialFilterExpression/collation are
                        # stringified so the rows serialize to CSV/JSON.
                        "key": str(index_data.get("key")),
                        "unique": index_data.get("unique", False),
                        "sparse": index_data.get("sparse", False),
                        "expireAfterSeconds": index_data.get("expireAfterSeconds"),
                        "partialFilterExpression": str(index_data.get("partialFilterExpression")),
                        "collation": str(index_data.get("collation"))
                    }
                    result.append(index_info)
                    user_indexes.append(index_info)
                json_output[db_name][collection_name] = user_indexes
            except Exception as e:
                logging.error(f"Failed to get indexes for {collection_name} in {db_name}: {e}")
    return result, json_output
def save_to_json(data, filepath="mongo_indexes.json"):
try:
with open(filepath, "w", encoding="utf-8") as f:
json.dump(data, f, indent=4)
logging.info(f"Index data saved to JSON: {filepath}")
except Exception as e:
logging.error(f"Failed to save JSON: {e}")
def save_to_csv(data, filepath="mongo_indexes.csv"):
try:
with open(filepath, mode="w", newline="", encoding="utf-8") as f:
fieldnames = [
"database", "collection", "name", "key", "unique",
"sparse", "expireAfterSeconds", "partialFilterExpression", "collation"
]
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(data)
logging.info(f"Index data saved to CSV: {filepath}")
except Exception as e:
logging.error(f"Failed to save CSV: {e}")
def main():
    """Entry point: connect, extract user-defined indexes, save JSON and CSV."""
    setup_logging()
    # SECURITY: database credentials must not live in source control (the
    # previous revision hard-coded several credentialed URIs here). Prefer
    # the MONGO_URI environment variable; the inline default is kept only
    # for backward compatibility and should be rotated and removed.
    import os
    uri = os.environ.get(
        "MONGO_URI",
        "mongodb://iLens:iLens%231234@192.168.0.207:3599/?authMechanism=DEFAULT&directConnection=true",
    )
    client = connect_to_mongo(uri)
    index_list, json_structure = get_user_defined_indexes(client)
    save_to_json(json_structure)
    save_to_csv(index_list)
    logging.info("Index extraction completed.")


if __name__ == "__main__":
    main()
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
from fastapi import FastAPI, HTTPException
from scripts.schemas.postgres_schema import GetTablesInfoResponse, UpdateIndexResponse, AddIndex, \
DeleteIndexResponse, DeleteIndex, AddIndexResponse
from scripts.utils.common import CommonUtils
# Application singletons: the FastAPI app and the shared utility facade
# that backs every endpoint below.
app = FastAPI()
common = CommonUtils()
@app.get('/get_tables_info', response_model=GetTablesInfoResponse)
def get_tables_info(
) -> GetTablesInfoResponse:
    """Retrieve the Postgres tables' metadata.

    Returns:
        GetTablesInfoResponse: table metadata as produced by
        CommonUtils.get_tables_info (per the original note, the data held
        in the postgres_default_schema_info collection).
    """
    data = common.get_tables_info()
    return data
@app.get('/update_index_from_db')
def update_index_from_db() -> dict:
    """Refresh stored index information from the database.

    Delegates to CommonUtils.update_index_from_db.

    Returns:
        dict: the result reported by CommonUtils.update_index_from_db.
    """
    data = common.update_index_from_db()
    return data
@app.post('/add_index', response_model=AddIndexResponse | UpdateIndexResponse)
def add_index(payload: AddIndex) -> AddIndexResponse | UpdateIndexResponse:
    """Add an index as described by *payload*.

    Params:
        payload: AddIndex request body describing the index to create.
    Returns:
        AddIndexResponse | UpdateIndexResponse: result from
        CommonUtils.add_index.
    Raises:
        HTTPException: when CommonUtils.add_index returns one to signal
        failure (re-raised here so FastAPI produces the error response).
    """
    data = common.add_index(payload)
    if isinstance(data, HTTPException):
        raise data
    return data
@app.post('/delete_index', response_model=DeleteIndexResponse)
def delete_index(payload: DeleteIndex) -> DeleteIndexResponse:
    """Delete the index identified by *payload*.

    Params:
        payload: DeleteIndex request body identifying the index to drop.
    Returns:
        DeleteIndexResponse: result from CommonUtils.delete_index.
    Raises:
        HTTPException: when CommonUtils.delete_index returns one to signal
        failure (re-raised here so FastAPI produces the error response).
    """
    data = common.delete_index(payload)
    if isinstance(data, HTTPException):
        raise data
    return data
{
"ilens_asset_model": {
"industry_category": []
},
"ilens_assistant": {
"converter_data": [],
"task_instance_data": [
{
"database": "ilens_assistant",
"collection": "task_instance_data",
"name": "task_id_1_stage_id_1",
"key": "[('task_id', 1), ('stage_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"anode_furnace_data": [],
"task_instances": [
{
"database": "ilens_assistant",
"collection": "task_instances",
"name": "task_id_1_logbook_id_1",
"key": "[('task_id', 1), ('logbook_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"logbook_action_templates": [],
"logbook_trigger_templates": [],
"periodic_data": [
{
"database": "ilens_assistant",
"collection": "periodic_data",
"name": "date_1_step_id_1",
"key": "[('date', 1), ('step_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"logbook": [
{
"database": "ilens_assistant",
"collection": "logbook",
"name": "logbook_id_1",
"key": "[('logbook_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"step_category": [],
"workflows": [
{
"database": "ilens_assistant",
"collection": "workflows",
"name": "workflow_id_1",
"key": "[('workflow_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"action_templates": [],
"trigger_templates": []
},
"ilens_clients": {
"ilens_clients": []
},
"ilens_configuration": {
"unit_conversion": [],
"alarm_priority": [],
"event_log": [],
"protocol_category": [],
"hierarchy_dfm": [],
"unit_group": [],
"pm_config": [],
"associate_pp_device": [],
"user": [
{
"database": "ilens_configuration",
"collection": "user",
"name": "user_id_1_project_id_1",
"key": "[('user_id', 1), ('project_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"physical_device_group": [],
"pipeline_instance": [],
"user_info_history": [],
"channel_pp_device": [],
"elogbook_rule_engine": [],
"channel_pipeline": [],
"agent_licenses": [],
"normalized_errors": [],
"protocol_list": [],
"tag_groups": [],
"customer_logos": [],
"device_type_templates": [],
"device_monitor": [],
"global_events": [],
"units": [],
"rules": [],
"bookmarks": [],
"tag_category": [],
"user_project": [],
"rule_definition": [],
"database_config": [],
"ilens_device_audit": [],
"ext_access_tokens": [
{
"database": "ilens_configuration",
"collection": "ext_access_tokens",
"name": "expires_in_1",
"key": "[('expires_in', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": 0,
"partialFilterExpression": "None",
"collation": "None"
}
],
"report_templates": [],
"live_tags": [],
"ai_rules": [],
"category_apps": [],
"tags": [
{
"database": "ilens_configuration",
"collection": "tags",
"name": "tag_id_1",
"key": "[('tag_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "ilens_configuration",
"collection": "tags",
"name": "id_1",
"key": "[('id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"alarm_note": [],
"plugin_state": [],
"ilens_devices": [],
"static_content": [],
"system_errors": [],
"email_gateway": [],
"jobs": [],
"user_role": [],
"access_group": [],
"shifts": [],
"downtime_rules": [],
"pipeline_category": [],
"scheduled_jobs": [
{
"database": "ilens_configuration",
"collection": "scheduled_jobs",
"name": "next_run_time_1",
"key": "[('next_run_time', 1)]",
"unique": false,
"sparse": true,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"events_collection": [],
"multiplication_factor": [],
"printers": [],
"flow_model_static_content": [],
"pipeline_info": [],
"job_list": [],
"customer_projects": [],
"license_templates": [],
"header": [],
"static": [],
"protocol_list_backup": [],
"thresholds": [],
"mapped_errors": [],
"vision_camera_config": [],
"project_preference": [],
"accessible_hierarchy": [
{
"database": "ilens_configuration",
"collection": "accessible_hierarchy",
"name": "project_id_1_user_id_1",
"key": "[('project_id', 1), ('user_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"default_pipelines": [],
"custom_node": [],
"reports": [],
"downtime_reasons": [],
"rule_engine": [],
"event_plan": [],
"site_conf": [],
"process_conf": [],
"lookup_table": [],
"rule_targets": [],
"user_preference": [],
"unique_id": [],
"user_recent": [],
"customer_apps": [],
"sms_gateway": [],
"device_model": [],
"scheduled_job_runs": [],
"spc_settings": [],
"gateway_instance": [],
"constants": [],
"physical_device": [],
"alarm_configuration": [],
"event_schedule": [],
"countries": [],
"schedule_metadata": []
},
"ilens_data_explorer": {
"filters": []
},
"ilens_default_info": {
"default_lookup": [],
"index_info": [],
"default_user_role": []
},
"ilens_elogbook": {
"periodic_data_entry": [],
"user_data_entry": [],
"e_logbook_template": [],
"e_logbook_configuration": [],
"unique_id": []
},
"ilens_equipment_maintenance": {
"maintenance_config": [],
"maintenance_engine_audit": [],
"maintenance_workorders": [],
"static_content": [],
"maintenance_plan": [],
"maintenance_log": []
},
"ilens_events": {
"events": [],
"gateway_events": [],
"alarm_audit": [],
"alarms": [],
"triggered_alarms": []
},
"ilens_inventory": {
"workorder_spare_parts": []
},
"ilens_mes": {
"product_lifecycle": [],
"product_instance": [],
"qa_schedule": [],
"mes_schedule": [],
"prod_schedule": [],
"product_master": [],
"unique_id": [],
"mes_hierarchy_mapping": [],
"product_master_old": []
},
"ilens_metadata": {
"unique_id": []
},
"ilens_reports": {
"energy_report_jobs_metadata": [],
"energy_reports": [],
"scheduled_jobs_energy_reports": [
{
"database": "ilens_reports",
"collection": "scheduled_jobs_energy_reports",
"name": "next_run_time_1",
"key": "[('next_run_time', 1)]",
"unique": false,
"sparse": true,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
]
},
"ilens_schedule_rule": {
"scheduled_jobs": [
{
"database": "ilens_schedule_rule",
"collection": "scheduled_jobs",
"name": "next_run_time_1",
"key": "[('next_run_time', 1)]",
"unique": false,
"sparse": true,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
]
},
"ilens_templates": {
"forms": [],
"template_category": []
},
"ilens_widget": {
"category": [],
"dashboard": [],
"ilens_annotations": [
{
"database": "ilens_widget",
"collection": "ilens_annotations",
"name": "hierarchy_1_ts_1",
"key": "[('hierarchy', 1), ('ts', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"report": [],
"spc_dashboard": [],
"widget": [],
"widget_metadata": [],
"logs": []
},
"mqtt": {
"mqtt_user": [],
"mqtt_acl": []
},
"project_101__ilens_asset_model": {
"asset_model_rule_engine": [
{
"database": "project_101__ilens_asset_model",
"collection": "asset_model_rule_engine",
"name": "rule_engine_id_1",
"key": "[('rule_engine_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_asset_model",
"collection": "asset_model_rule_engine",
"name": "project_id_1",
"key": "[('project_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_asset_model",
"collection": "asset_model_rule_engine",
"name": "type_1",
"key": "[('type', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"asset_model_details": [
{
"database": "project_101__ilens_asset_model",
"collection": "asset_model_details",
"name": "asset_model_id_1",
"key": "[('asset_model_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_asset_model",
"collection": "asset_model_details",
"name": "asset_model_id_1_asset_version_1",
"key": "[('asset_model_id', 1), ('asset_version', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
]
},
"project_101__ilens_assistant": {
"task_instances_2024015": [
{
"database": "project_101__ilens_assistant",
"collection": "task_instances_2024015",
"name": "task_id_1_logbook_id_1",
"key": "[('task_id', 1), ('logbook_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_assistant",
"collection": "task_instances_2024015",
"name": "task_description_text",
"key": "[('_fts', 'text'), ('_ftsx', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"task_instances": [
{
"database": "project_101__ilens_assistant",
"collection": "task_instances",
"name": "task_category_1_meta.created_at_1_task_description_1",
"key": "[('task_category', 1), ('meta.created_at', 1), ('task_description', 1)]",
"unique": true,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "SON([('task_category', 'Production Down Time')])",
"collation": "None"
}
],
"form_props": [
{
"database": "project_101__ilens_assistant",
"collection": "form_props",
"name": "step_id_text",
"key": "[('_fts', 'text'), ('_ftsx', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"backup_task_instances": [],
"workflows": [
{
"database": "project_101__ilens_assistant",
"collection": "workflows",
"name": "workflow_id_1",
"key": "[('workflow_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"workflow_permissions": [
{
"database": "project_101__ilens_assistant",
"collection": "workflow_permissions",
"name": "workflow_id_text_step_id_text_user_role_text_permissions_text",
"key": "[('_fts', 'text'), ('_ftsx', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"logbook_links": [],
"Copy_of_steps_bkp_v2": [
{
"database": "project_101__ilens_assistant",
"collection": "Copy_of_steps_bkp_v2",
"name": "step_id_-1_step_name_-1_step_category_-1",
"key": "[('step_id', -1), ('step_name', -1), ('step_category', -1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"Copy_of_steps_bkp": [
{
"database": "project_101__ilens_assistant",
"collection": "Copy_of_steps_bkp",
"name": "step_id_-1_step_name_-1_step_category_-1",
"key": "[('step_id', -1), ('step_name', -1), ('step_category', -1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"triggers": [],
"reference_steps": [],
"task_instance_data": [
{
"database": "project_101__ilens_assistant",
"collection": "task_instance_data",
"name": "task_id_1_stage_id_1",
"key": "[('task_id', 1), ('stage_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_assistant",
"collection": "task_instance_data",
"name": "stage_id_-1",
"key": "[('stage_id', -1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"periodic_stage_status": [],
"scheduled_info": [],
"logbook_triggers": [],
"steps": [
{
"database": "project_101__ilens_assistant",
"collection": "steps",
"name": "step_id_-1_step_name_-1_step_category_-1",
"key": "[('step_id', -1), ('step_name', -1), ('step_category', -1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_assistant",
"collection": "steps",
"name": "project_id_1",
"key": "[('project_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_assistant",
"collection": "steps",
"name": "step_version_-1_meta.created_at_-1",
"key": "[('step_version', -1), ('meta.created_at', -1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_assistant",
"collection": "steps",
"name": "step_id_1",
"key": "[('step_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_assistant",
"collection": "steps",
"name": "project_id_1_step_version_-1_meta.created_at_-1_step_id_1",
"key": "[('project_id', 1), ('step_version', -1), ('meta.created_at', -1), ('step_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"task_info": [
{
"database": "project_101__ilens_assistant",
"collection": "task_info",
"name": "task_info_id_-1",
"key": "[('task_info_id', -1)]",
"unique": true,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"logbook": [
{
"database": "project_101__ilens_assistant",
"collection": "logbook",
"name": "logbook_id_1",
"key": "[('logbook_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"periodic_data": [
{
"database": "project_101__ilens_assistant",
"collection": "periodic_data",
"name": "date_1_step_id_1",
"key": "[('date', 1), ('step_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
]
},
"project_101__ilens_configuration": {
"physical_device": [],
"report_templates": [],
"pipeline_category": [],
"pipeline_instance": [],
"ilens_devices": [
{
"database": "project_101__ilens_configuration",
"collection": "ilens_devices",
"name": "ilens_device_id_1",
"key": "[('ilens_device_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "ilens_devices",
"name": "physical_device_group_id_1",
"key": "[('physical_device_group_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"customer_apps": [],
"temp_asset_tag": [],
"constants": [],
"unique_id": [],
"tags": [
{
"database": "project_101__ilens_configuration",
"collection": "tags",
"name": "tag_id_1",
"key": "[('tag_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "tags",
"name": "id_1",
"key": "[('id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"unit_group": [],
"units": [
{
"database": "project_101__ilens_configuration",
"collection": "units",
"name": "id_1",
"key": "[('id', 1)]",
"unique": true,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"edge_connectivity": [],
"rule_engine": [
{
"database": "project_101__ilens_configuration",
"collection": "rule_engine",
"name": "processOn_1",
"key": "[('processOn', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "rule_engine",
"name": "rule_engine_id_1_project_id_1",
"key": "[('rule_engine_id', 1), ('project_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "rule_engine",
"name": "last_updated_1",
"key": "[('last_updated', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"gateway_instance": [],
"rule_targets": [],
"reports": [],
"tag_groups": [],
"category_apps": [],
"tag_hierarchy": [
{
"database": "project_101__ilens_configuration",
"collection": "tag_hierarchy",
"name": "id_1",
"key": "[('id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "tag_hierarchy",
"name": "parameter_id_1",
"key": "[('parameter_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "tag_hierarchy",
"name": "site_id_1",
"key": "[('site_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "tag_hierarchy",
"name": "project_id_1",
"key": "[('project_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"tag_category": [],
"channel_pp_device": [
{
"database": "project_101__ilens_configuration",
"collection": "channel_pp_device",
"name": "device_model_id_1",
"key": "[('device_model_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "channel_pp_device",
"name": "site_id_1",
"key": "[('site_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "channel_pp_device",
"name": "pipeline_device_id_1",
"key": "[('pipeline_device_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"process_conf": [],
"ilens_device_audit": [],
"data_upload_history": [
{
"database": "project_101__ilens_configuration",
"collection": "data_upload_history",
"name": "project_upload",
"key": "[('project_id', -1), ('upload_id', -1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "data_upload_history",
"name": "file_id_-1",
"key": "[('file_id', -1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"static_content": [],
"assets": [
{
"database": "project_101__ilens_configuration",
"collection": "assets",
"name": "hierarchy_1",
"key": "[('hierarchy', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "assets",
"name": "node_id_1",
"key": "[('node_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"data_upload_error": [],
"pipeline_info": [],
"database_config": [],
"tags_v2_association": [],
"alarm_configuration": [],
"design_tag_data": [],
"search_information": [],
"rule_template": [
{
"database": "project_101__ilens_configuration",
"collection": "rule_template",
"name": "rule_template_id_1",
"key": "[('rule_template_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "rule_template",
"name": "Selected_ruleType_1",
"key": "[('Selected_ruleType', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "rule_template",
"name": "category_1",
"key": "[('category', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"unit_conversion": [],
"hierarchy_details": [
{
"database": "project_101__ilens_configuration",
"collection": "hierarchy_details",
"name": "node_id_1",
"key": "[('node_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "hierarchy_details",
"name": "parent_id_1",
"key": "[('parent_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "hierarchy_details",
"name": "type_1",
"key": "[('type', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "hierarchy_details",
"name": "project_id_1",
"key": "[('project_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"associate_pp_device": [],
"download_job_list": [],
"device_model": [],
"data_upload": [],
"device_monitor": [],
"job_list": [],
"events_collection": [
{
"database": "project_101__ilens_configuration",
"collection": "events_collection",
"name": "ilens_device_id_-1_event_id_-1",
"key": "[('ilens_device_id', -1), ('event_id', -1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"batch_jobs": [],
"alarm_note": [],
"physical_device_group": [
{
"database": "project_101__ilens_configuration",
"collection": "physical_device_group",
"name": "physical_device_group_id_1",
"key": "[('physical_device_group_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"ilens_events": [],
"temp_parameters": [],
"notification_ack_details": [],
"materials": [],
"site_conf": [],
"redundant_device_mapping": [],
"channel_pipeline": [],
"lookup_table": [
{
"database": "project_101__ilens_configuration",
"collection": "lookup_table",
"name": "lookup_name_1",
"key": "[('lookup_name', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_configuration",
"collection": "lookup_table",
"name": "project_id_1",
"key": "[('project_id', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"platform_notifications": [],
"temp_data_mapping": [],
"custom_node": [],
"shifts": []
},
"project_101__ilens_data_explorer": {
"worksheet": [],
"workspaces": []
},
"project_101__ilens_hmi": {
"sld_new": []
},
"project_101__ilens_widget": {
"dashboard": [
{
"database": "project_101__ilens_widget",
"collection": "dashboard",
"name": "dashboard_id_1",
"key": "[('dashboard_id', 1)]",
"unique": true,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
},
{
"database": "project_101__ilens_widget",
"collection": "dashboard",
"name": "category_1",
"key": "[('category', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"category": [],
"spc_dashboard": [],
"template_widget": [],
"widget_comments": [],
"chart_report_templates": [],
"template_dashboard": [],
"widget": [],
"ilens_annotations": [
{
"database": "project_101__ilens_widget",
"collection": "ilens_annotations",
"name": "hierarchy_1_ts_1",
"key": "[('hierarchy', 1), ('ts', 1)]",
"unique": false,
"sparse": false,
"expireAfterSeconds": null,
"partialFilterExpression": "None",
"collation": "None"
}
],
"widget_plugin": []
},
"project_166__ilens_configuration": {
"rule_targets": [],
"constants": [],
"device_model": [],
"database_config": [],
"calibration_meta_collection": [],
"lookup_table": []
},
"supportlens_configuration": {
"subcategory_configuration": [],
"unique_id": [],
"ticket_configuration": [],
"resolver_configuration": [],
"category_configuration": []
}
}
\ No newline at end of file
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
This file exposes configurations from config file and environments as Class Objects
"""
# if __name__ == "__main__":
from dotenv import load_dotenv
load_dotenv()
import os.path
import sys
from configparser import BasicInterpolation, ConfigParser
class EnvInterpolation(BasicInterpolation):
    """
    Interpolation which expands environment variables in values.

    After the standard BasicInterpolation pass, ``$VAR``/``${VAR}``
    references are expanded from the environment.  A value whose
    expansion still begins with ``$`` (i.e. the variable is unset)
    resolves to ``None`` so callers can detect the missing setting.
    """

    def before_get(self, parser, section, option, value, defaults):
        # Let BasicInterpolation resolve %(...)s references first.
        value = super().before_get(parser, section, option, value, defaults)
        # FIX: expand once instead of twice (the original called
        # os.path.expandvars both in the test and in the return).
        expanded = os.path.expandvars(value)
        # expandvars leaves an unresolved env var as "$NAME"; treat that
        # as "not set" and return None explicitly (was an implicit bare
        # ``return`` before).
        return None if expanded.startswith("$") else expanded
# Bootstrap: parse the application config once at import time; any
# failure to load it is fatal for the service.
try:
    _parser = ConfigParser(interpolation=EnvInterpolation())
    _parser.read("conf/application.conf")
    config = _parser
except Exception as e:
    print(f"Error while loading the config: {e}")
    print("Failed to Load Configuration. Exiting!!!")
    sys.stdout.flush()
    sys.exit()
class DBConf:
    """
    Database connection settings resolved from the application config.

    Exits the process with status 1 when a required URI is missing, so a
    misconfigured deployment fails fast at import time.
    """

    # Raw base URI; EnvInterpolation yields None for unset env vars, and
    # fallback=None avoids a NoSectionError crashing before the friendly
    # error message below can be printed.
    _postgres_base = config.get("POSTGRES_DB", "POSTGRES_URI", fallback=None)
    if not _postgres_base:
        # BUG FIX: the original truth-tested the f-string result, which
        # is always truthy (it would contain the literal text "None"),
        # so a missing POSTGRES_URI was never detected.
        print("Error, environment variable POSTGRES_URI not set")
        sys.exit(1)
    POSTGRES_URI = f'{_postgres_base}/project_216__ilens_alarms'

    MONGO_URI = config.get("MONGO_DB", "MONGO_URI", fallback=None)
    if not MONGO_URI:
        print("Error, environment variable MONGO_URI not set")
        sys.exit(1)
\ No newline at end of file
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
class MongoConstants:
    """Names of the Mongo database and collections used by this service."""

    # Databases
    db_default_info = 'ilens_default_info'

    # Collections
    collection_postgres_default_schema_info = 'postgres_default_schema_info'
import datetime
from typing import Optional
from sqlalchemy import ForeignKey
from sqlalchemy.orm import Mapped, mapped_column
from scripts.db.psql.databases import Base
class AlarmEventHistorySchema(Base):
    """ORM mapping for the ``alarm_event_history`` table."""

    __tablename__ = "alarm_event_history"

    alarm_event_id: Mapped[str] = mapped_column(primary_key=True)
    # BUG FIX: the original passed the bare name ``alarmdefinition_id``,
    # which is undefined and raises NameError at import time.  ForeignKey
    # expects a "table.column" string.
    # TODO(review): confirm the referenced table/column name against the
    # alarm-definition schema.
    alarm_id: Mapped[str] = mapped_column(ForeignKey("alarm_definition.alarm_id"))
    trigger_time: Mapped[datetime.datetime] = mapped_column()
    trigger_condition: Mapped[str]
    project_id: Mapped[str]
    alarm_status: Mapped[Optional[str]] = mapped_column(default="")
    alarm_state: Mapped[Optional[str]] = mapped_column(default="")
    ack_status: Mapped[Optional[bool]] = mapped_column(default=False)
    end_time: Mapped[Optional[datetime.datetime]] = mapped_column(default=None)
    duration: Mapped[Optional[int]] = mapped_column(default=0)
    alarm_template: Mapped[Optional[str]] = mapped_column(default="")
\ No newline at end of file
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
from sqlalchemy import create_engine
# MODERNIZED: sqlalchemy.ext.declarative.declarative_base was deprecated
# in SQLAlchemy 1.4/2.0 in favour of sqlalchemy.orm.declarative_base;
# the project already uses 2.0-style Mapped/mapped_column elsewhere.
from sqlalchemy.orm import declarative_base, sessionmaker

from scripts.config.app_configurations import DBConf

# Engine, session factory and declarative base are created once at
# import time and shared by every model/consumer in the service.
engine = create_engine(DBConf.POSTGRES_URI)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
# Dependency
def get_db():
    """Yield a database session, guaranteeing it is closed afterwards."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
from typing import Optional
from pydantic import BaseModel, Field
class UpdateIndexFromDB(BaseModel):
    """Request body locating the collection that stores index metadata."""

    collection_name: str = Field(
        'index_info',
        alias='collection_name',
        title='Collection Name',
        description='The name of collection in which index metadata is present',
    )
    db_name: str = Field(
        'ilens_default_info',
        alias='db_name',
        title='Database Name',
        description='The name of database in which the collection is present',
    )
class DeleteIndex(BaseModel):
    """Request body identifying the index to drop and its metadata record."""

    # FIX: fields whose default is None are annotated Optional[str];
    # the original declared them as plain ``str`` with default=None,
    # which mis-states the schema (and breaks strict validation).
    name: Optional[str] = Field(
        default=None,
        title='Index Name',
        description='The name of index is to be removed',
        alias='name'
    )
    collection_name: Optional[str] = Field(
        default=None,
        title='Collection Name',
        description='The name of collection from which index is to be removed',
        alias='collection_name'
    )
    metadata_collection_name: str = Field(
        default='index_info',
        title='Metadata Collection Name',
        description='The name of collection which contains the index metadata',
        alias='metadata_collection_name'
    )
    db_name: Optional[str] = Field(
        default=None,
        title='Database Name',
        description='The name of database in which the collection is present',
        alias='db_name'
    )
    metadata_db_name: str = Field(
        default='ilens_default_info',
        title='Metadata Database Name',
        description='The name of database in which metadata collection if present',
        alias='metadata_db_name'
    )
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
from typing import Optional
from pydantic import BaseModel
class PostgresColumnConstraint(BaseModel):
    """A single constraint attached to a column (identified by name only)."""
    name: Optional[str] = None


class PostgresTableColumn(BaseModel):
    """One table column: its name, SQL data type and constraints."""
    name: str
    data_type: str
    constraints: list[PostgresColumnConstraint]


class PostgresTableConstraint(BaseModel):
    """A table-level constraint and the columns it spans."""
    type: str
    columns: list[str]


class PostgresTableIndex(BaseModel):
    """One index definition: name, columns, uniqueness and access method."""
    name: str
    columns: list[str]
    unique: bool
    type: str


class PostgresTableBase(BaseModel):
    """Minimal table description: name plus its indexes."""
    table_name: str
    indexes: list[PostgresTableIndex]


class PostgresTable(PostgresTableBase):
    """Full table description including columns, constraints and priority."""
    columns: list[PostgresTableColumn]
    constraints: list[PostgresTableConstraint]
    priority: int


class PostgresTableSchemaBase(BaseModel):
    """A Postgres schema identified by name."""
    schema_name: str


class PostgresTableSchemas(PostgresTableSchemaBase):
    """A schema together with the tables it contains."""
    tables: list[PostgresTable]


class PostgresDefaultSchemaBase(BaseModel):
    """A database identified by name."""
    database_name: str


class IndexBase(BaseModel):
    """Fully-qualified location of one index: database/schema/table + index."""
    database_name: str
    schema_name: str
    table_name: str
    index: PostgresTableIndex


# Request
class PostgresDefaultSchema(PostgresDefaultSchemaBase):
    """A database together with all of its schemas."""
    schemas: list[PostgresTableSchemas]


class AddIndex(IndexBase):
    """Request payload for adding an index (same shape as IndexBase)."""
    pass


class UpdateIndex(IndexBase):
    """Request payload for updating an index (same shape as IndexBase)."""
    pass


class DeleteIndex(BaseModel):
    """Request payload for deleting an index by name."""
    name: str


# Responses
class GetTablesInfoResponse(BaseModel):
    """All stored database/schema/table metadata records."""
    data: list[PostgresDefaultSchema]


class AddIndexResponse(IndexBase):
    """Echo of the added index plus a success message."""
    message: str = 'Successfully Added New Index'


class UpdateIndexResponse(IndexBase):
    """Echo of the updated index plus a success message."""
    message: str = 'Successfully Updated Index'


class DeleteIndexResponse(IndexBase):
    """Echo of the deleted index plus a success message."""
    message: str = 'Successfully Deleted Index'


class UpdateIndexResponses(BaseModel):
    """Batch wrapper around multiple UpdateIndexResponse items."""
    data: list[UpdateIndexResponse]
\ No newline at end of file
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
from pydantic import BaseModel
class DetectOS(BaseModel):
    """Operating-system details plus a human-readable summary message."""

    os_name: str
    os_version: str
    os_release: str
    message: str
\ No newline at end of file
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
import platform
from fastapi import status, HTTPException
from psycopg2 import NotSupportedError
from scripts.constants.db import MongoConstants
from scripts.schemas.postgres_schema import GetTablesInfoResponse, AddIndex, \
UpdateIndexResponse, UpdateIndex, DeleteIndex, DeleteIndexResponse, AddIndexResponse
from scripts.schemas.util_schema import DetectOS
from scripts.utils.common.db.mongo import find_all, get_collection, find_index, delete_index, add_index, update_index
from scripts.utils.common.db.postgres import create_index_from_data, recreate_index, drop_index_from_data
class CommonUtils:
    """
    Handlers that keep Postgres indexes in sync with the index metadata
    stored in Mongo (collection ``postgres_default_schema_info``).
    """

    @staticmethod
    def detect_os() -> DetectOS:
        """
        Helper Function To Detect OS
        :return: **DetectOS**
        """
        result = DetectOS(
            os_name=platform.system(),
            os_version=platform.version(),
            os_release=platform.release(),
            message="Unknown or unsupported OS."
        )
        print(f"Operating System: {result.os_name}")
        print(f"OS Release: {result.os_release}")
        print(f"OS Version: {result.os_version}")
        if result.os_name == "Windows":
            result.message = "This is a Windows system."
        elif result.os_name == "Linux":
            result.message = "This is a Linux system."
        elif result.os_name == "Darwin":
            result.message = "This is macOS."
        print(result.message)
        return result

    # BUG FIX: the original class body contained an
    # ``if __name__ == "__main__": print(detect_os())`` guard.  A module
    # guard inside a class body runs at class-definition time and fails
    # on Python < 3.10 (staticmethod objects are not callable there).
    # It was removed; add a guard at module level if needed.

    def get_tables_info(self):
        """
        This function is used to get all the records from the collection 'postgres_default_schema_info'
        :return: **GetTablesInfoResponse**: all the records of the collection or False if any error occurs
        """
        try:
            collection = get_collection(
                MongoConstants.collection_postgres_default_schema_info,
                MongoConstants.db_default_info
            )
            records = find_all(collection)
            return GetTablesInfoResponse(data=records)
        except Exception as e:
            # Falsy sentinel kept for backward compatibility with callers
            # that truth-test the result.
            print(f'Exception occurred: {e}')
            return False

    def update_index_from_db(self) -> dict:
        """
        This function applies all the indexes to the postgres tables on the basis of data in Mongo
        :return: dict response {'status': 'SUCCESS', 'message': 'All the indexes from the db are applied to postgres'}
        :raises HTTPException: 500 on any failure
        """
        try:
            collection = get_collection(
                MongoConstants.collection_postgres_default_schema_info,
                MongoConstants.db_default_info
            )
            records = find_index(collection)
            for record in records:
                # Validate the raw Mongo record before touching Postgres.
                record = UpdateIndex(**dict(record))
                create_index_from_data(record.model_dump())
            return {
                'status': 'SUCCESS',
                'message': 'All the indexes from the db are applied to postgres'
            }
        except Exception as e:
            print(f'Exception occurred: {e}')
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=f'''There was some technical error at our end we'll resolve it quickly. Thank you for your patience.'''
            )

    def add_index(self, payload: AddIndex):
        """
        This is a dynamic functions that Adds Index if not present or Updates Index if present subsequently updating the metadata in Mongo
        AddIndex:
            database_name: Name of database you want the index to be added or updated on
            schema_name: Name of schema you want the index to be added or updated on
            table_name: Name of table you want the index to be added or updated on
            index: Class: PostgresTableIndex
                name: Name of the Index
                columns: Array of columns you want to include in index
                type: The Access Method of Index
                unique: [true/false]
        :param payload: Class: AddIndex(Information of AddIndex is Mentioned Above)
        :return: **AddIndexResponse** or **UpdateIndexResponse** the newly added or updated index info record of Mongo
        """
        try:
            collection = get_collection(
                MongoConstants.collection_postgres_default_schema_info,
                MongoConstants.db_default_info
            )
            exists = True
            # First, look for an index with the same name.
            records = find_index(
                collection,
                index_name=payload.index.name,
                table_name=payload.table_name,
                schema_name=payload.schema_name,
                database_name=payload.database_name
            )
            if not records:
                exists = False
                # Till here index of same name doesn't exist
                # Now will check for columns
                records = find_index(
                    collection,
                    database_name=payload.database_name,
                    schema_name=payload.schema_name,
                    table_name=payload.table_name
                )
                for record in records:
                    record = UpdateIndex(**record)
                    # Same column set + same access method means the new
                    # definition replaces this existing index.
                    if (
                        payload.index.columns == record.index.columns
                        and payload.index.type == record.index.type
                    ):
                        records = [record.model_dump()]
                        exists = True
                        break
            if exists:
                for record in records:
                    index_data = dict(record)
                    record = UpdateIndexResponse(**index_data)
                    index_data.update({'new_index': payload.index.model_dump()})
                    recreate_index(index_data)
                    if not update_index(collection, index_data):
                        return HTTPException(
                            status_code=status.HTTP_417_EXPECTATION_FAILED,
                            detail="""Unable to update index metadata in database"""
                        )
                    return record
            # No equivalent index exists: create it and persist metadata.
            create_index_from_data(payload.model_dump())
            if not add_index(collection, payload.model_dump()):
                return HTTPException(
                    status_code=status.HTTP_417_EXPECTATION_FAILED,
                    detail="""Unable to add metadata to the database"""
                )
            return AddIndexResponse(**payload.model_dump())
        except NotSupportedError as e:
            if 'access method "hash" does not support multicolumn indexes' in str(e):
                raise HTTPException(
                    status_code=status.HTTP_406_NOT_ACCEPTABLE,
                    detail="HASH Doesn't Support Multiple Columns"
                )
            # FIX: previously any other NotSupportedError was silently
            # swallowed (the method returned None); re-raise instead.
            raise
        except Exception as e:
            print(f'Exception occurred: {e}')
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=f'''There was some technical error at our end we'll resolve it quickly. Thank you for your patience.'''
            )

    def delete_index(self, payload: DeleteIndex) -> None | HTTPException | DeleteIndexResponse:
        """
        This functions Deletes the index if it exists subsequently deleting the metadata in Mongo
        DeleteIndex:
            name: Name of the Index to delete
        :param payload: Class: DeleteIndex(Information of DeleteIndex is Mentioned Above)
        :return: **DeleteIndexResponse** the deleted index info record of Mongo or **HTTPException**
        """
        try:
            collection = get_collection(
                MongoConstants.collection_postgres_default_schema_info,
                MongoConstants.db_default_info
            )
            records = find_index(
                collection,
                index_name=payload.name
            )
            if not records:
                return HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail=f'Failed To Delete.... No Index Like "{payload.name}" Exists'
                )
            for record in records:
                index_data = dict(record)
                record = DeleteIndexResponse(**record)
                drop_index_from_data(index_data)
                if not delete_index(collection, index_data):
                    return HTTPException(
                        status_code=status.HTTP_501_NOT_IMPLEMENTED,
                        detail="""Unable to delete metadata for db"""
                    )
                # Only the first matching record is processed/returned.
                return record
        except Exception as e:
            print(f'Exception occurred: {e}')
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=f'''There was some technical error at our end we'll resolve it quickly. Thank you for your patience.'''
            )
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
from pymongo import MongoClient
from pymongo.synchronous.collection import Collection
from pymongo.synchronous.database import Database
from scripts.config.app_configurations import DBConf
# Retrieval
# Single shared client for the whole process, created at import time.
# NOTE(review): relies on the driver not connecting eagerly here —
# confirm against the installed pymongo version.
mongo_client = MongoClient(DBConf.MONGO_URI)
def _get_database(name: str) -> Database:
    """
    Return a Database handle from the shared Mongo client.

    :param name: Name of the database in which this collection is present
    :return: database object of Database class
    """
    return mongo_client.get_database(name)
def get_collection(collection_name: str, db_name: str) -> Collection:
    """
    Return a Collection handle for ``collection_name`` inside ``db_name``.

    :param collection_name: The name of collection you want to access
    :param db_name: Name of the database in which this collection is present
    :return: collection object of Collection class
    """
    database = _get_database(db_name)
    return database.get_collection(collection_name)
def execute_aggregate(collection: Collection, query: list):
    """Run an aggregation pipeline and return its results as a list."""
    cursor = collection.aggregate(query)
    return list(cursor) if cursor else []
# Operations
def find_all(collection: Collection):
    """
    This function is used to get all the records present in a collection
    :param collection: Object of the collection you want to access
    :return: A list of all records present in the collection or [] if there are no records in collections
    """
    # Strip the Mongo-internal _id so records match the pydantic schemas.
    cursor = collection.aggregate([{'$project': {'_id': 0}}])
    return list(cursor) if cursor else []
def find_index(collection: Collection,
               include_database_name: bool = True,
               include_schema_name: bool = True,
               include_table_name: bool = True,
               full_projection: bool = False,
               index_name: str = None,
               table_name: str = None,
               schema_name: str = None,
               database_name: str = None):
    """
    Find index metadata records by unwinding the nested
    database -> schemas -> tables -> indexes documents, optionally
    filtered by database/schema/table/index name.

    :param database_name: Name of the database in which you want to find the index
    :param schema_name: Name of the schema in which you want to find the index
    :param table_name: Name of the table in which you want to find the index
    :param full_projection: True if you want the whole record else False [default: False]
    :param index_name: Name of the index you want to find
    :param include_table_name: True if you want to include table name in result else False [default: True]
    :param include_schema_name: True if you want to include schema name in result else False [default: True]
    :param include_database_name: True if you want to include database name in result else False [default: True]
    :param collection: Object of the collection you want to access
    :return: A list of all records present in the collection or [] if there are no records in collections
    """
    query = list()
    projection = {'_id': 0}
    if include_database_name:
        projection.update({'database_name': 1})
    if include_schema_name:
        projection.update({"schema_name": "$schemas.schema_name"})
    if include_table_name:
        projection.update({"table_name": "$schemas.tables.table_name"})
    # The three $unwind stages must precede $match so each pipeline
    # document represents exactly one index.
    query.append({"$unwind": "$schemas"})
    query.append({"$unwind": "$schemas.tables"})
    query.append({"$unwind": "$schemas.tables.indexes"})
    projection.update({"index": "$schemas.tables.indexes"})
    match = dict()
    if database_name:
        match.update({"database_name": database_name})
    if schema_name:
        match.update({"schemas.schema_name": schema_name})
    if table_name:
        match.update({"schemas.tables.table_name": table_name})
    if index_name:
        match.update({"schemas.tables.indexes.name": index_name})
    # Only add a $match stage when at least one filter was supplied.
    if database_name or schema_name or table_name or index_name:
        query.append({'$match': match})
    if full_projection:
        query.append({'$project': {'_id': 0}})
    else:
        query.append({'$project': projection})
    result = execute_aggregate(collection, query)
    return result
def add_index(collection: Collection, data: dict):
    """
    Append a new index entry to the matching table's ``indexes`` array
    inside the nested metadata document.

    :param collection: metadata collection holding the nested schema docs
    :param data: dict with database_name / schema_name / table_name and
        an ``index`` dict (name, columns, unique, type)
    :return: True when a document was modified, else False
    """
    query = {
        "database_name": data.get('database_name'),
        "schemas.schema_name": data.get('schema_name', 'public'),
        "schemas.tables.table_name": data.get('table_name')
    }
    update = {
        "$push": {
            "schemas.$[s].tables.$[t].indexes": {
                "name": data.get('index').get('name'),
                "columns": data.get('index').get('columns'),
                "unique": data.get('index').get('unique'),
                "type": data.get('index').get('type')
            }
        }
    }
    # Array filters pin the $[s]/$[t] positional operators to the
    # requested schema and table.
    array_filters = [
        {"s.schema_name": data.get('schema_name', 'public')},
        {"t.table_name": data.get('table_name')}
    ]
    # Perform the update
    result = collection.update_one(query, update, array_filters=array_filters)
    if result.modified_count > 0:
        return True
    else:
        return False
def update_index(collection: Collection, data: dict):
    """
    Overwrite one index entry inside the nested metadata document.

    Matches via database/schema/table/index name and uses the
    $[s]/$[t]/$[i] positional operators (pinned by array_filters) to set
    the fields from ``data['new_index']``.

    :param collection: metadata collection holding the nested schema docs
    :param data: dict with database_name / schema_name / table_name, the
        current ``index`` (for matching) and the replacement ``new_index``
    :return: True when a document was modified, else False
    """
    query = {
        "database_name": data.get('database_name'),
        "schemas.schema_name": data.get('schema_name', 'public'),
        "schemas.tables.table_name": data.get('table_name'),
        "schemas.tables.indexes.name": data.get('index').get('name')
    }
    update = {
        "$set": {
            "schemas.$[s].tables.$[t].indexes.$[i].name": data.get('new_index').get('name'),
            "schemas.$[s].tables.$[t].indexes.$[i].columns": data.get('new_index').get('columns'),
            # New columns for the index
            "schemas.$[s].tables.$[t].indexes.$[i].unique": data.get('new_index').get('unique'),  # Update uniqueness of the index
            "schemas.$[s].tables.$[t].indexes.$[i].type": data.get('new_index').get('type')  # Update index type
        }
    }
    array_filters = [
        {"s.schema_name": data.get('schema_name', 'public')},
        {"t.table_name": data.get('table_name')},
        {"i.name": data.get('index').get('name')}
    ]
    # Perform the update
    result = collection.update_one(query, update, array_filters=array_filters)
    # Print the result
    if result.modified_count > 0:
        print("Index Updated successfully.")
        return True
    else:
        print("No matching index found or index was not updated.")
        return False
def delete_index(collection: Collection, data: dict):
    """
    Remove one index entry from the nested metadata document via $pull.

    :param collection: metadata collection holding the nested schema docs
    :param data: dict with database_name / schema_name / table_name and
        the ``index`` dict whose ``name`` identifies the entry to remove
    :return: True when a document was modified, else False
    """
    filter_criteria = {
        "database_name": data.get('database_name'),
        "schemas.schema_name": data.get('schema_name', 'public'),
        "schemas.tables.table_name": data.get('table_name'),
        "schemas.tables.indexes.name": data.get('index').get('name')
    }
    update_criteria = {
        "$pull": {
            "schemas.$[s].tables.$[t].indexes": {
                "name": data.get('index').get('name')
            }
        }
    }
    # Array filters pin $[s]/$[t] to the requested schema and table.
    array_filters = [
        {"s.schema_name": data.get('schema_name', 'public')},
        {"t.table_name": data.get('table_name')}
    ]
    # Perform the update operation
    result = collection.update_one(
        filter_criteria,
        update_criteria,
        array_filters=array_filters
    )
    # Output the result
    if result.modified_count > 0:
        print("Index deleted successfully.")
        return True
    else:
        print("No matching index found or index was not deleted.")
        return False
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
import logging
from sqlalchemy import text
from sqlalchemy.engine import reflection
from sqlalchemy.orm import Session
from scripts.db.psql.databases import engine
def _table_has_column(table, column):
    """Return True if *table* has a column named *column*."""
    insp = reflection.Inspector.from_engine(engine)
    return any(col["name"] == column for col in insp.get_columns(table))
def _table_has_index(table, index):
    """Return True if *table* has an index named *index*."""
    insp = reflection.Inspector.from_engine(engine)
    return any(ind["name"] == index for ind in insp.get_indexes(table))
def create_index_from_data(index_data: dict):
    """
    Create a Postgres index described by *index_data*.

    ``index_data['index']`` — or ``index_data['new_index']`` when present
    (the recreate path) — supplies name/columns/type/unique; schema falls
    back to 'public'.  Identifiers are double-quoted and IF NOT EXISTS
    makes the call idempotent.
    """
    # FIX: use the ``in`` operator instead of calling the
    # ``__contains__`` dunder directly.
    index = index_data['new_index'] if 'new_index' in index_data else index_data["index"]
    index_name = index["name"]
    columns = index["columns"]
    using = index.get("type", "btree").upper()
    unique = index.get("unique", False)
    schema = index_data.get("schema_name", "public")
    table_name = index_data["table_name"]

    column_str = ", ".join(f'"{col}"' for col in columns)
    unique_str = "UNIQUE " if unique else ""
    create_sql = (
        f'CREATE {unique_str}INDEX IF NOT EXISTS "{index_name}" '
        f'ON "{schema}"."{table_name}" USING {using} ({column_str});'
    )
    with engine.begin() as conn:
        conn.execute(text(create_sql))
    print(f"Created index: {index_name} on {schema}.{table_name}")
def drop_index_from_data(index_data: dict):
    """Drop the index named in ``index_data['index']``, schema-qualified."""
    index_name = index_data["index"]["name"]
    schema = index_data.get("schema_name", "public")  # fallback to 'public'
    statement = f'DROP INDEX IF EXISTS "{schema}"."{index_name}";'
    with engine.begin() as conn:
        conn.execute(text(statement))
    print(f"Dropped index: {schema}.{index_name}")
def recreate_index(index_data: dict):
    """
    Drop and re-create an index.

    :param index_data: must contain ``index`` (current definition); may
        contain ``new_index`` (replacement definition applied on
        re-create by create_index_from_data).
    """
    drop_index_from_data(index_data)
    create_index_from_data(index_data)
    # FIX: the original printed index_data['new_index'] unconditionally,
    # raising KeyError when recreate_index is called without a
    # replacement definition.
    effective = index_data.get('new_index', index_data.get('index', {}))
    print(
        f"Updated index: {effective.get('name')} on "
        f"{index_data.get('schema_name', 'public')}.{index_data.get('table_name')}"
    )
"""
Author: Owaiz Mustafa Khan
Email: owaiz.mustafakhan@rockwellautomation.com
"""
import pymongo
from pymongo.errors import OperationFailure
from scripts.schemas.mongo_schema import DeleteIndex, UpdateIndexFromDB
from scripts.utils.common import get_collection
def get_data(data: str):
    """Map a direction/type token to the corresponding pymongo constant."""
    mapping = {
        'ASC': pymongo.ASCENDING,
        'DESC': pymongo.DESCENDING,
        'GEO2D': pymongo.GEO2D,
        'GEOSPHERE': pymongo.GEOSPHERE,
        'HASHED': pymongo.HASHED,
        'TEXT': pymongo.TEXT,
    }
    # Unknown tokens yield None, matching the original if/elif chain.
    return mapping.get(data)
def get_collection_info(index: dict):
    """Return (collection_name, db_name), accepting either key spelling."""
    name = index.get('collection', index.get('collection_name'))
    database = index.get('db', index.get('db_name'))
    return name, database
def get_index_info(index: dict) -> list[dict]:
    """Fetch a collection's live indexes as [{'name': ..., 'fields': [...]}]."""
    collection_name, db_name = get_collection_info(index)
    collection = get_collection(collection_name, db_name)
    # Flatten each raw index document into name + ordered field list.
    infos = []
    for idx in collection.list_indexes():
        infos.append({"name": idx["name"], "fields": list(idx["key"].keys())})
    return infos
def index_exists(index: dict, by_name: bool = False) -> bool:
    """
    Check whether an equivalent index already exists on the collection.

    With ``by_name=True`` the match is on index name; otherwise the
    match is on the ordered field list.
    """
    existing = get_index_info(index)
    if by_name:
        wanted_name = index.get('name')
        return any(info.get('name') == wanted_name for info in existing)
    wanted_fields = index.get('fields')
    return any(info.get('fields') == wanted_fields for info in existing)
def make_keys(indexes: list[dict]) -> list[tuple] | str:
result = list()
keys = indexes[0].get('keys')
if indexes[0].get('type') == 'simple':
return keys
for key in keys:
result.append((
key[0],
get_data(key[1])
))
return result
def add_new_index(index_info):
    """
    Create the Mongo index described by *index_info* on its target
    collection.

    Only the first entry of ``index_info['indexes']`` is consulted (see
    make_keys); optional properties (unique/sparse/hidden/background)
    come from its ``additional_properties``.  Errors are printed and
    swallowed so batch processing can continue.
    """
    try:
        collection_name, db_name = get_collection_info(index_info)
        indexes = index_info.get('indexes')
        additional_properties = indexes[0].get('additional_properties')
        fields = make_keys(indexes)
        collection = get_collection(collection_name, db_name)
        if not additional_properties:
            collection.create_index(keys=fields)
            return
        collection.create_index(
            keys=fields,
            unique=additional_properties.get('unique', False),
            sparse=additional_properties.get('sparse', False),
            hidden=additional_properties.get('hidden', False),
            background=additional_properties.get('background', False)
        )
    except OperationFailure as of:
        # NOTE(review): this duplicate-index check is hard-coded to the
        # literal index name "email_1" and will not match any other
        # index — confirm whether that is intentional.
        if 'Index with name: email_1 already exists with different options' in str(of):
            print('Cannot add index as it already exists')
            return
    except Exception as e:
        print(e)
        return
def update_all_index_from_db(payload: UpdateIndexFromDB):
    """
    Create every index described in the metadata collection, skipping
    ones that already exist (matched by field list).

    NOTE(review): a status dict is returned only in the "no metadata"
    case; the success path falls through returning None, and any failure
    calls exit(0) — terminating the process with a success code.
    Confirm whether that is intended.
    """
    try:
        collection = get_collection(payload.collection_name, payload.db_name)
        # Get all index metadata stored in Mongo
        index_infos = list(collection.find({}, {'_id': 0}))
        # Checks that data is present
        if not len(index_infos):
            return {
                'status': 'COMPLETED',
                'message': 'No index metadata found to create indexes.'
            }
        # Iterate through data and add new index
        for index_info in index_infos:
            if index_exists(index_info):  # Checking if the index already exists
                continue
            add_new_index(index_info)  # Adding new index
    except Exception as e:
        print(f'Exception occurred during update: {e}')
        exit(0)
def delete_index(payload: DeleteIndex):
    """
    Drop a live Mongo index and prune its metadata records.

    Looks up the index on ``payload.collection_name``/``payload.db_name``
    by name; if present, removes matching entries from the metadata
    collection (``payload.metadata_collection_name`` /
    ``payload.metadata_db_name``) and finally drops the index itself.
    """
    try:
        i_collection = get_collection(payload.collection_name, payload.db_name)
        if not index_exists(payload.model_dump(), by_name=True):
            return
        collection = get_collection(payload.metadata_collection_name, payload.metadata_db_name)
        index_infos = get_index_info(payload.model_dump())
        # Deleting metadata from mongo
        docs = collection.find({"db": payload.db_name, "collection": payload.collection_name})
        for doc in docs:
            indexes = doc.get("indexes", [])
            new_indexes = []
            for index in indexes:
                index_type = index.get("type")
                keys = index.get("keys")
                if isinstance(keys, list):
                    # Compound keys are [field, direction] pairs; keep
                    # only the field names for comparison below.
                    _ = list()
                    for key in keys:
                        _.append(key[0])
                    keys = _
                for index_info in index_infos:
                    if ([keys] if index_type == 'simple' else keys) == index_info.get('fields'):
                        print(f"Deleting entire document: {doc['_id']}")
                        collection.delete_one({"_id": doc["_id"]})
                        break
                else:
                    # Inner for/else: no live index matched this metadata
                    # entry, so keep it.
                    new_indexes.append(index)
            else:
                # If loop wasn't broken (i.e., not deleted), update or delete based on new index array
                # NOTE(review): the break above only exits the inner
                # loop, so this else clause always runs — on an
                # already-deleted doc the calls below are no-ops; confirm
                # intent.
                if not new_indexes:
                    collection.delete_one({"_id": doc["_id"]})
                else:
                    collection.update_one(
                        {"_id": doc["_id"]},
                        {"$set": {"indexes": new_indexes}}
                    )
        i_collection.drop_index(payload.name)
    except Exception as e:
        # NOTE(review): exit(0) kills the process with a success code on
        # failure — consider re-raising instead.
        print(f'Some exception occurred while deleting the index: {e}')
        exit(0)
# NOTE(review): this runs at import time — it drops the hard-coded
# index below and prunes its metadata from the '__test' database every
# time this module is imported or executed.  Consider guarding it with
# ``if __name__ == "__main__":``.
delete_index(DeleteIndex(
    name='lookup_name_-1_lookup_id_-1',
    collection_name='lookup_table',
    metadata_collection_name='test_collection',
    db_name='ilens_configuration',
    metadata_db_name='__test'
))
# delete_index(DeleteIndex(
# name='id_1',
# collection_name='design_tag_data',
# metadata_collection_name='test_collection',
# db_name='ilens_configuration',
# metadata_db_name='__test'
# ))
# update_all_index_from_db(UpdateIndexFromDB())
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment