Commit 7afc5a71 authored by mohammed.shibili's avatar mohammed.shibili

first commit

parents
SERVICE_HOST=127.0.0.1
SERVICE_PORT=8000
WORKERS=1
PROJECT_NAME=EMS-Automation
DB_NAME=ilens_widget
LOG_PATH=log
LOG_LEVEL=INFO
BACKUP_COUNT=5
MAX_BYTES=10000000
CONFIG_PATH=DATA/Config/
CONFIG_FILE_NAME=config.yml
UPLOAD_LOG_PATH=/DATA/Config/
#staging_new
#BASE_URL=https://rtmi.aarti-industries.unifytwin.com/
#USER_ID=CZ8vXWpJLza8TXXnpN2sFo
#PROJECT_ID=project_101
#LOGIN_TOKEN=9d858dfff7564571b191b4c6eb7f5ab8
#BASE_URL=https://staging.unifytwin.com/
#USER_ID=56jyuX7Z9M4HkoeGfTNJWk
#PROJECT_ID=project_445
#LOGIN_TOKEN=1a54e0de3a81494ab95ae56d0c24a1af
BASE_URL=https://staging.unifytwin.com/
USER_ID=7BYVRyxnzE4MS33Gv8Y6eB
PROJECT_ID=project_437
LOGIN_TOKEN=d102d0403da94627a812836c7e2a66f8
NEW_BASE_URL=https://staging.unifytwin.com/
NEW_USER_ID=56jyuX7Z9M4HkoeGfTNJWk
NEW_PROJECT_ID=project_445
NEW_LOGIN_TOKEN=8eeb913e1d1e4da9bcdcfd788f3e037e
# Default ignored files
/shelf/
/workspace.xml
from scripts.config import Services
from scripts.logging.logger import logger
import uvicorn
import argparse
__author__ = 'H115-925 MOHAMMED SHIBILI O'
__date__ = '17 January 2024'

# Command-line interface for launching the service under uvicorn.
ap = argparse.ArgumentParser()

if __name__ == "__main__":
    try:
        logger.info(f"****Starting {Services.PROJECT_NAME} ***")
        print(f"****************************Starting {Services.PROJECT_NAME}****************************")
        ap.add_argument(
            "--port",
            "-p",
            required=False,
            default=Services.SERVICE_PORT,
            help="Port to start the application.",
        )
        ap.add_argument(
            "--bind",
            "-b",
            required=False,
            default=Services.SERVICE_HOST,
            # BUG FIX: help text previously read "IF"; this is the host/IP
            # address the server binds to.
            help="IP to start the application.",
        )
        ap.add_argument(
            "--workers",
            "-w",
            type=int,
            default=Services.WORKERS,
            help="Number of worker processes to use.",
        )
        arguments = vars(ap.parse_args())
        logger.info(f"App Starting at {arguments['bind']}:{arguments['port']} with {arguments['workers']} workers.")
        # "main:app" is an import string so uvicorn can spawn worker processes.
        uvicorn.run(
            "main:app",
            host=arguments["bind"],
            # --port has no type= on its argument, so a CLI value arrives as
            # str; coerce explicitly before handing it to uvicorn.
            port=int(arguments["port"]),
            workers=arguments["workers"]
        )
    except Exception as e:
        logger.error(f"Error from app - {e}")
File added
This diff is collapsed.
import uvicorn
from fastapi import FastAPI
from scripts import services
from scripts.config import Services
from scripts.logging.logger import logger
# Application entry point: build the FastAPI app and mount every service route.
app = FastAPI()
app.include_router(services.router)

# starting the application
if __name__ == "__main__":
    try:
        # BUG FIX: previously uvicorn.run() was given services.router — an
        # APIRouter, which is not an ASGI application. uvicorn must be handed
        # the FastAPI app (which includes that router).
        uvicorn.run(app, host=Services.SERVICE_HOST, port=Services.SERVICE_PORT)
    except Exception as e:
        logger.exception(e)
uvicorn~=0.26.0
fastapi~=0.109.0
pydantic~=2.5.3
pydantic-settings~=2.1.0
python-dotenv~=1.0.0
pandas~=2.1.4
openpyxl~=3.1.2
httpx~=0.26.0
pymongo~=4.6.1
tzlocal~=5.2
\ No newline at end of file
from pydantic import Field
from pydantic_settings import BaseSettings
from dotenv import load_dotenv
load_dotenv()
class _Services(BaseSettings):
    """Core service settings, populated from the environment / .env file."""
    SERVICE_HOST: str = Field(default="127.0.0.1", env="service_host")
    SERVICE_PORT: int = Field(default=8001, env="service_port")
    PROJECT_NAME: str = Field(default="EMS-Automation", env="project_name")
    # CORS configuration (consumed by the application setup).
    ENABLE_CORS: bool = True
    CORS_ALLOW_CREDENTIALS: bool = True
    CORS_ALLOW_METHODS: list = ["GET", "POST"]
    CORS_ALLOW_HEADERS: list = ["*"]
    # Logging knobs — no defaults, so these MUST be set in the environment,
    # otherwise instantiation fails with a validation error.
    LOG_LEVEL: str
    BACKUP_COUNT: int
    MAX_BYTES: int
    ENABLE_FILE_LOGGING: bool = True
    ENABLE_BACKUP_STORING: bool = Field(default=False, env="enable_backup")
    # NOTE(review): Field(env=...) is the pydantic v1 convention; with
    # pydantic-settings v2 (pydantic~=2.5 in requirements) env names are
    # derived from the field name instead — confirm which version is used.
    WORKERS: int = Field(default=12, env="workers")
class _BasePathConf(BaseSettings):
    """Base URLs of the source (BASE_URL) and destination (NEW_BASE_URL) deployments."""
    BASE_URL: str = Field(default="http://192.168.0.220/", env="base_path")
    NEW_BASE_URL: str = Field(default="http://192.168.0.220/", env="new_base_url")
class _PathConf(BaseSettings):
    """Filesystem paths; required in the environment (no defaults)."""
    LOG_PATH: str  # directory where rotating log files are written
    CONFIG_PATH: str  # directory expected to hold config.yml
class _Databases(BaseSettings):
    """MongoDB connection settings."""
    MONGO_URI: str = Field(default="mongodb://localhost:27017", env="mongo_uri")
    # DB_ASSET_CONSTANTS: str = Field(default="Asset_manager_constants", env="db_asset_constants")
    DB_NAME: str = Field(default="ilens_widget", env="db_name")
class _ProjectDetails(BaseSettings):
    """Credentials/ids of the source project and the NEW_* destination project."""
    # SECURITY NOTE(review): real-looking login tokens are committed here as
    # defaults (and in the .env file) — rotate them and keep them out of VCS.
    USER_ID: str = Field(default="user_099", env="user_id")
    PROJECT_ID: str = Field(default="project_099", env="project_id")
    LOGIN_TOKEN: str = Field(default="0224903f578741f1a092679365f371d8", env="login_token")
    NEW_USER_ID: str = Field(default="user_099", env="new_user_id")
    NEW_PROJECT_ID: str = Field(default="project_099", env="new_project_id")
    NEW_LOGIN_TOKEN: str = Field(default="1e0fe69ec4f6473da8444f2d8cc54d81", env="new_login_token")
# Singleton settings instances, built once at import time; import these rather
# than the private classes above.
Services = _Services()
PathConf = _PathConf()
Databases = _Databases()
Base_path = _BasePathConf()
Project_details = _ProjectDetails()

# Explicit public API of this config module.
__all__ = [
    "Services",
    "PathConf",
    "Databases",
    "Base_path",
    "Project_details"
]
class ApiEndpoints:
    """Route paths exposed by this service's own FastAPI routers."""
    dashboards = "/dashboards"
    widget = "/create_json"
    save_widget = "/widget"
    save_app = "/app"
    copy_app = "/copy_app"
    create_new = "/create"
class RequestEndpoints:
    """Relative paths of the upstream iLens APIs (joined onto a base URL)."""
    save_dashboard = "visual4.0/dashboard/save"
    list_dashboards = "visual4.0/dashboard/list"
    save_widget = "visual4.0/widget/save"
    save_app = "ilens_api/ilens_config/save_embedded_apps"
    hierarchy_details = "hry/hry/fetch_tags"
    get_site_level_hierarchy = "ilens_api/ilens_config/get_site_level_hierarchy"
    list_embedded_apps = "ilens_api/ilens_config/list_embedded_apps"
    list_embedded_apps_id_based = "ilens_api/ilens_config/list_embedded_apps_based_on_id"
    list_widgets = "visual4.0/widget/list"
import yaml
class DatabaseConstants:
    """Collection names read once, at import time, from config.yml.

    NOTE: importing this module therefore requires config.yml to exist in the
    process working directory.
    """
    with open('config.yml', 'r') as _config_file:
        _config = yaml.safe_load(_config_file)
    collection_dashboard = _config["collection"]["collection_dashboard"]
    # BUG FIX: the temporaries previously leaked into the class namespace as
    # DatabaseConstants.f / DatabaseConstants.data; drop them explicitly.
    del _config_file, _config
This diff is collapsed.
class KeyConstants:
    """Miscellaneous file/key constants."""
    # local file where the parsed Excel parameters are dumped as JSON
    output_file = "widget.json"
from scripts.config import Project_details
class AppPayload:
    """Request-body templates for the embedded-apps listing endpoints.

    Callers fill in the empty identifier fields before posting.
    """

    list_embedded_apps = dict(
        project_id="",
        keys=["apps", "types", "category", "headerContent"],
        project_type="customer",
        tz="Asia/Kolkata",
        language="en",
    )

    list_embedded_apps_id_based = dict(
        app_id="",
        project_id="",
        name="",
        project_type="customer",
        tz="Asia/Kolkata",
        language="en",
    )
class DashboardPayloads:
    """Request-body template for listing the widgets of a dashboard."""

    list_widgets = dict(
        dashboard_id="",
        name="",
        disable_usage_tracking=False,
        project_id="",
        project_type="customer",
        tz="Asia/Kolkata",
        language="en",
    )
# Session cookies attached to every outbound iLens API request; values come
# from the (source) project credentials in scripts.config.
cookies = {
    "login-token": Project_details.LOGIN_TOKEN, "userId": Project_details.USER_ID,
    "projectId": Project_details.PROJECT_ID
}
import httpx
from scripts.config import Project_details, Base_path
from scripts.constants.api import RequestEndpoints
from scripts.constants.json_constants import list_dashboard_payload
from scripts.constants.request_payloads import cookies
from scripts.logging.logger import logger
class Dashboards:
    """Handlers for looking up, creating and cloning iLens dashboards."""

    @staticmethod
    def get_dashboard_details(dashboard_name):
        """Return the dashboard document named dashboard_name["dashboard_name"].

        :param dashboard_name: dict with key "dashboard_name"
        :return: matching dashboard dict, {} when none matches, False on a
            non-200 response, or None when an exception was logged.
        """
        try:
            logger.info(f'service started in getting the dashboard reference')
            # NOTE(review): this mutates the shared module-level payload
            # template in place; consider copying it per request.
            existing_dashboard = list_dashboard_payload
            existing_dashboard["searchText"] = dashboard_name["dashboard_name"]
            existing_dashboard["filters"]["search"] = dashboard_name["dashboard_name"]
            existing_dashboard["project_id"] = Project_details.PROJECT_ID
            list_url = f'{Base_path.BASE_URL}{RequestEndpoints.list_dashboards}'
            dashboard_lists = httpx.post(url=list_url, cookies=cookies, json=list_dashboard_payload)
            # BUG FIX: check the status code BEFORE parsing the body. A failed
            # request previously raised inside .json()/indexing, making the
            # `return False` branch unreachable.
            if dashboard_lists.status_code != 200:
                return False
            json_response = dashboard_lists.json()
            list_of_dashboards = json_response["data"]["body_content"]
            dashboard_details = {}
            for item in list_of_dashboards:
                if item.get('name') == dashboard_name["dashboard_name"]:
                    dashboard_details = item
            return dashboard_details
        except Exception as e:
            logger.exception(f'error while fetching the dashboard details {e}')

    @staticmethod
    def create_new_dashboard(existing_dashboard, new_dashboard_name):
        """Create a dashboard cloned from existing_dashboard's category/type.

        :param existing_dashboard: reference dashboard document
        :param new_dashboard_name: desired name for the clone
        :return: the new dashboard id, or False when it could not be resolved.
        """
        try:
            logger.info(f'service started in creating new dashboard')
            category = existing_dashboard["category"]
            dashboard_type = existing_dashboard["type"]
            dashboard_name = new_dashboard_name
            list_of_dashboards = Dashboards.get_dashboard_details({"dashboard_name": dashboard_name})
            # BUG FIX: get_dashboard_details may return None/False; guard
            # before calling .get() on the result.
            if list_of_dashboards and list_of_dashboards.get('name') == new_dashboard_name:
                # the name is already taken — disambiguate with a suffix
                dashboard_name = f'{new_dashboard_name}_new'
            Dashboards.create_dashboard(dashboard_name, category, dashboard_type)
            new_dashboard_list = Dashboards.get_dashboard_details({"dashboard_name": dashboard_name})
            dashboard_id = ""
            if new_dashboard_list and new_dashboard_list.get('name') == dashboard_name:
                dashboard_id = new_dashboard_list.get('dashboard_id', '')
            logger.info(f'dashboard id >>>> {dashboard_id}')
            if dashboard_id:
                return dashboard_id
            return False
        except Exception as e:
            logger.error(f'error while creating new dashboard {e}')

    @staticmethod
    def create_dashboard(dashboard_data, category, dashboard_type):
        """POST a save-dashboard request; returns the httpx response (or None on error)."""
        try:
            logger.debug(f'creating json payload')
            payload = {
                "category": category,
                "name": dashboard_data,
                "project_id": Project_details.PROJECT_ID,
                "type": dashboard_type,
                "project_type": "customer",
                "tz": "Asia/Kolkata",
                "language": "en"
            }
            url = f'{Base_path.BASE_URL}{RequestEndpoints.save_dashboard}'
            response = httpx.post(url=url, cookies=cookies, json=payload)
            return response
        except Exception as e:
            logger.error(f'error in create dashboard {e}')
This diff is collapsed.
import logging
import pathlib
from logging import StreamHandler
from logging.handlers import RotatingFileHandler, SocketHandler
from scripts.config import Services, PathConf
def read_configuration():
    """Assemble the logging configuration dict from the service settings."""
    handler_specs = [
        {"type": "RotatingFileHandler", "max_bytes": Services.MAX_BYTES, "back_up_count": Services.BACKUP_COUNT},
        {"type": "StreamHandler", "name": Services.PROJECT_NAME},
    ]
    return {"name": Services.PROJECT_NAME, "handlers": handler_specs}
def init_logger():
    """Create the project's rotating-file / stream logger.

    Handlers are built from read_configuration(); unknown handler types are
    skipped. Returns the configured logging.Logger.
    """
    logging_config = read_configuration()
    __logger__ = logging.getLogger(Services.PROJECT_NAME)
    __logger__.setLevel(Services.LOG_LEVEL)
    log_formatter = "%(asctime)s - %(levelname)-6s - [%(funcName)5s(): %(lineno)s] - %(message)s"
    time_format = "%Y-%m-%d %H:%M:%S"
    formatter = logging.Formatter(log_formatter, time_format)
    for each_handler in logging_config["handlers"]:
        if (
            each_handler["type"] in ["RotatingFileHandler"]
            and Services.ENABLE_FILE_LOGGING
        ):
            # ensure the log directory exists before the handler opens the file
            pathlib.Path(PathConf.LOG_PATH).mkdir(parents=True, exist_ok=True)
            log_file = pathlib.Path(PathConf.LOG_PATH, f"{Services.PROJECT_NAME}.log")
            temp_handler = RotatingFileHandler(
                log_file,
                maxBytes=each_handler["max_bytes"],
                backupCount=each_handler["back_up_count"],
            )
        elif each_handler["type"] in ["SocketHandler"]:
            temp_handler = SocketHandler(each_handler["host"], each_handler["port"])
        elif each_handler["type"] in ["StreamHandler"]:
            temp_handler = StreamHandler()
        else:
            # BUG FIX: unrecognised handler types (including a file handler
            # spec with file logging disabled) previously fell through to
            # temp_handler = None, and addHandler(None) breaks logging calls.
            continue
        # BUG FIX: the formatter was only applied on some branches; apply it
        # uniformly so every handler formats records identically.
        temp_handler.setFormatter(formatter)
        __logger__.addHandler(temp_handler)
    return __logger__


# Shared module-level logger, created once at import time.
logger = init_logger()
from fastapi import APIRouter
from scripts.services import dashboards
# Aggregate router: collects every service-level router under one include so
# the app can mount them with a single include_router call.
router = APIRouter()
router.include_router(dashboards.router)
import logging
import pandas as pd
from fastapi import APIRouter, UploadFile, File, Form
from scripts.config import Services
from scripts.constants.api import ApiEndpoints
from scripts.core.handlers.dashboard_handler import Dashboards
from scripts.core.handlers.widget_handler import Widgets
from scripts.utils.dashboard_utils import create_json
# Module-level collaborators for the dashboard routes.
logger = logging.getLogger(Services.PROJECT_NAME)
router = APIRouter(prefix=ApiEndpoints.dashboards)
dashboard_handler = Dashboards()
widget_handler = Widgets()
@router.post(ApiEndpoints.dashboards + ApiEndpoints.create_new, tags=["1.create dashboard"])
async def create_dashboard(file: UploadFile = File(...), sheet_name: str = Form(), new_dashboard_name: str = Form(),
                           existing_dashboard_name: str = Form()):
    """
    :param existing_dashboard_name: name of existing dashboard which we need to take reference
    :param new_dashboard_name: name of new dashboard to be created
    :param file: Excel file with parameter details
    :param sheet_name: name of the sheet which we need to create dashboard
    :return: success or failure message
    """
    try:
        logger.info(f'service started in creating dashboard')
        # SECURITY NOTE(review): the upload is written to the working directory
        # under the client-supplied filename and never removed — sanitise the
        # name (path traversal) and clean up before production use.
        with open(file.filename, "wb") as f:
            f.write(file.file.read())
        with pd.ExcelFile(file.filename, engine="openpyxl") as xls:
            parameters_df = pd.read_excel(xls, sheet_name=sheet_name)
        parameters_json = create_json(parameters_df)
        dashboard_name = {"dashboard_name": existing_dashboard_name}
        existing_dashboards = dashboard_handler.get_dashboard_details(dashboard_name)
        # guard clauses: fail fast with the same messages as before
        if not existing_dashboards:
            return {
                "status": "failed",
                "error": "error in getting dashboard details"
            }
        existing_widget = widget_handler.get_widget_details(existing_dashboards)
        if not existing_widget:
            return {
                "status": "failed",
                "error": "error in getting widget details"
            }
        dashboard_response = dashboard_handler.create_new_dashboard(existing_dashboards, new_dashboard_name)
        # BUG FIX: a failed dashboard creation previously fell through and the
        # endpoint returned None (null body); report it explicitly.
        if not dashboard_response:
            return {
                "status": "failed",
                "error": "error in creating new dashboard"
            }
        widget_response, missing_tags = widget_handler.create_new_widgets(existing_widget, parameters_json,
                                                                          dashboard_response)
        if widget_response or missing_tags:
            return {
                "widget": f'widget number:{widget_response} missing',
                "tag": f'tag {missing_tags} not found'
            }
        # BUG FIX: a fully successful run previously returned None.
        return {"status": "success"}
    except Exception as e:
        logger.exception(f'error while creating dashboard {e}')
import json
import httpx
from tzlocal import get_localzone
from scripts.config import Base_path, Project_details
from scripts.constants.api import RequestEndpoints
from scripts.constants.keys import KeyConstants
from scripts.logging.logger import logger
def get_local_timezone():
    """Return the timezone of the machine this service runs on."""
    return get_localzone()
def get_dashboards_list(list_dashboard_payload, cookies):
    """POST the dashboard-list request and return its body_content entries.

    Falls back to the configured project credentials when no cookies are given.
    """
    if not cookies:
        cookies = {
            "login-token": Project_details.LOGIN_TOKEN, "userId": Project_details.USER_ID,
            "projectId": Project_details.PROJECT_ID
        }
    target_url = f'{Base_path.BASE_URL}{RequestEndpoints.list_dashboards}'
    response = httpx.post(url=target_url, cookies=cookies, json=list_dashboard_payload)
    return response.json()["data"]["body_content"]
def get_hierarchy(data):
    """Group hierarchy strings per "<widget_name>_<type>" key.

    Returns two dicts with the same keys: every tag combination (path plus
    ":parameters"), and the de-duplicated filter paths (without parameters).
    Empty plant/line/equipment/parameters levels are skipped.
    """
    combination_dict = {}
    filter_dict = {}
    for doc in data:
        group_key = f"{doc['widget_name']}_{doc['type']}"
        # build "site>plant>line>" prefix, omitting empty middle levels
        path = f"{doc['site']}>"
        for level in ('plant', 'line'):
            if doc[level]:
                path += f"{doc[level]}>"
        filter_path = path + (doc['equipment'] if doc['equipment'] else "")
        combination = path + (f"{doc['equipment']}:" if doc['equipment'] else "")
        if doc['parameters']:
            combination += doc['parameters']
        # combinations keep every occurrence, filters are de-duplicated
        combination_dict.setdefault(group_key, []).append(combination)
        known_filters = filter_dict.setdefault(group_key, [])
        if filter_path not in known_filters:
            known_filters.append(filter_path)
    return combination_dict, filter_dict
def create_hierarchy(json_data):
    """Annotate each record with its filter path (the Tag Name up to ':').

    Mutates the records in place; returns two separate lists that both
    reference the same annotated records: (combination_list, filter_list).
    """
    combination_list, filter_list = [], []
    for record in json_data:
        record["filter"] = record["Tag Name"].split(":")[0]
        filter_list.append(record)
        combination_list.append(record)
    return combination_list, filter_list
def get_new_dashboards_list(list_dashboard_payload, cookies):
    """POST the dashboard-list request against the NEW_BASE_URL deployment
    and return its body_content entries."""
    target_url = f'{Base_path.NEW_BASE_URL}{RequestEndpoints.list_dashboards}'
    response = httpx.post(url=target_url, cookies=cookies, json=list_dashboard_payload)
    return response.json()["data"]["body_content"]
def create_json(df):
    """Convert a DataFrame to a list of row dicts (one dict per row).

    Also writes the JSON to KeyConstants.output_file as a side effect.
    Returns None (after logging) when conversion fails.
    """
    try:
        logger.debug(f'creating json')
        split_form = df.to_dict(orient='split')
        json_data = [
            dict(zip(split_form['columns'], row)) for row in split_form['data']
        ]
        with open(KeyConstants.output_file, 'w') as out_file:
            json.dump(json_data, out_file)
        return json_data
    except Exception as e:
        logger.error(f'error while creating json {e}')
from __future__ import annotations
import logging
from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Union
from pymongo import MongoClient
from pymongo.command_cursor import CommandCursor
from pymongo.cursor import Cursor
from pymongo.results import (
DeleteResult,
InsertManyResult,
InsertOneResult,
UpdateResult,
)
class MongoCollectionBaseClass:
    """Thin convenience wrapper around one pymongo collection.

    Every operation resolves the database/collection from the client, logs
    any exception and re-raises it, so callers keep pymongo's exceptions.
    """

    def __init__(
        self,
        mongo_client: MongoClient,
        database: str,
        collection: str,
    ) -> None:
        self.client = mongo_client
        self.database = database
        self.collection = collection

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(database={self.database}, collection={self.collection})"

    def _get_collection(self):
        """Resolve the pymongo Collection handle for this wrapper.

        Factored out of every method below, which previously repeated the
        same four lines of database/collection lookup boilerplate.
        """
        return self.client[self.database][self.collection]

    def insert_one(self, data: Dict) -> InsertOneResult:
        """
        The function is used to inserting a document to a collection in a Mongo Database.
        :param data: Data to be inserted
        :return: Insert ID
        """
        try:
            return self._get_collection().insert_one(data)
        except Exception as e:
            logging.exception(e)
            raise

    def insert_many(self, data: List) -> InsertManyResult:
        """
        The function is used to inserting documents to a collection in a Mongo Database.
        :param data: List of Data to be inserted
        :return: Insert IDs
        """
        try:
            return self._get_collection().insert_many(data)
        except Exception as e:
            logging.exception(e)
            raise

    def find(
        self,
        query: dict,
        filter_dict: Optional[dict] = None,
        sort: Union[
            None, str, Sequence[Tuple[str, Union[int, str, Mapping[str, Any]]]]
        ] = None,
        skip: int = 0,
        limit: Optional[int] = None,
    ) -> Cursor:
        """
        The function is used to query documents from a given collection in a Mongo Database
        :param query: Query Dictionary
        :param filter_dict: Filter Dictionary (defaults to hiding _id)
        :param sort: List of tuple with key and direction. [(key, -1), ...]
        :param skip: Skip Number
        :param limit: Limit Number
        :return: Cursor over the matching documents
        """
        if sort is None:
            sort = []
        if filter_dict is None:
            filter_dict = {"_id": 0}
        try:
            collection = self._get_collection()
            if len(sort) > 0:
                cursor = collection.find(query, filter_dict).sort(sort).skip(skip)
            else:
                cursor = collection.find(query, filter_dict).skip(skip)
            if limit:
                cursor = cursor.limit(limit)
            return cursor
        except Exception as e:
            logging.exception(e)
            raise

    def find_one(self, query: dict, filter_dict: Optional[dict] = None) -> dict | None:
        """Return the first document matching query, or None (hides _id by default)."""
        try:
            if filter_dict is None:
                filter_dict = {"_id": 0}
            return self._get_collection().find_one(query, filter_dict)
        except Exception as e:
            logging.exception(e)
            raise

    def update_one(
        self,
        query: dict,
        data: dict,
        upsert: bool = False,
        strategy: str = "$set",
    ) -> UpdateResult:
        """
        Update the first document matching query.
        :param strategy: Mongo update operator to apply (default "$set")
        :param upsert: insert when no document matches
        :param query: filter selecting the document
        :param data: fields passed to the update operator
        :return: pymongo UpdateResult
        """
        try:
            return self._get_collection().update_one(query, {strategy: data}, upsert=upsert)
        except Exception as e:
            logging.exception(e)
            raise

    def update_many(
        self, query: dict, data: dict, upsert: bool = False
    ) -> UpdateResult:
        """
        $set the given fields on every document matching query.
        :param upsert: insert when no document matches
        :param query: filter selecting the documents
        :param data: fields to set
        :return: pymongo UpdateResult
        """
        try:
            return self._get_collection().update_many(query, {"$set": data}, upsert=upsert)
        except Exception as e:
            logging.exception(e)
            raise

    def delete_many(self, query: dict) -> DeleteResult:
        """
        Delete every document matching query.
        :param query: filter selecting the documents
        :return: pymongo DeleteResult
        """
        try:
            return self._get_collection().delete_many(query)
        except Exception as e:
            logging.exception(e)
            raise

    def delete_one(self, query: dict) -> DeleteResult:
        """
        Delete the first document matching query.
        :param query: filter selecting the document
        :return: pymongo DeleteResult
        """
        try:
            return self._get_collection().delete_one(query)
        except Exception as e:
            logging.exception(e)
            raise

    def distinct(self, query_key: str, filter_json: Optional[dict] = None) -> list:
        """
        Return the distinct values of query_key across matching documents.
        :param query_key: field name
        :param filter_json: optional filter
        :return: list of distinct values
        """
        try:
            return self._get_collection().distinct(query_key, filter_json)
        except Exception as e:
            logging.exception(e)
            raise

    def aggregate(
        self,
        pipelines: list,
    ) -> CommandCursor:
        """Run an aggregation pipeline; returns the CommandCursor."""
        try:
            return self._get_collection().aggregate(pipelines)
        except Exception as e:
            logging.exception(e)
            raise
""" Mongo DB utility
All definitions related to mongo db is defined in this module
"""
from pymongo import MongoClient
from .mongo_tools import mongo_sync
from ..logging.logger import logger
class MongoConnect:
    """Factory/holder for a shared pymongo MongoClient.

    connect=False defers establishing the connection until first use.
    """
    def __init__(self, uri):
        try:
            self.uri = uri
            self.client = MongoClient(uri, connect=False)
        except Exception as e:
            logger.exception(e)
            raise

    def __call__(self, *args, **kwargs):
        # calling the instance returns the underlying client
        return self.client

    def get_client(self):
        return self.client

    def __repr__(self):
        # NOTE(review): server_info() performs a network round-trip, so repr
        # of this object is not side-effect free.
        return f"Mongo Client(uri:{self.uri}, server_info={self.client.server_info()})"

    @staticmethod
    def get_base_class():
        # convenience accessor for the collection wrapper base class
        return mongo_sync.MongoCollectionBaseClass
class MongoStageCreator:
    """Helpers that wrap a spec dict under a Mongo operator key."""

    @staticmethod
    def add_stage(stage_name: str, stage: dict) -> dict:
        """Wrap *stage* under *stage_name* (e.g. "$match")."""
        return {stage_name: stage}

    def projection_stage(self, stage: dict) -> dict:
        return {"$project": stage}

    def match_stage(self, stage: dict) -> dict:
        return {"$match": stage}

    def lookup_stage(self, stage: dict) -> dict:
        return {"$lookup": stage}

    def unwind_stage(self, stage: dict) -> dict:
        return {"$unwind": stage}

    def group_stage(self, stage: dict) -> dict:
        return {"$group": stage}

    def add_fields(self, stage: dict) -> dict:
        return {"$addFields": stage}

    def sort_stage(self, stage: dict) -> dict:
        return {"$sort": stage}

    def regex_stage(self, stage: dict) -> dict:
        # NOTE(review): $regex and $ne are query operators, not pipeline
        # stages — the "stage" naming here is misleading but kept for callers.
        return {"$regex": stage}

    def not_equal(self, stage: dict) -> dict:
        return {"$ne": stage}
[{"Tag Name": "NEO>MEA Plant>Column Section>Moisture Removal Column C-1122:Running Status(P1127)", "Label": "Running Status", "Existing Widget Name": "Widget", "New widget Name": "Test 1", "Null": "Automation test KPI"}, {"Tag Name": "NEO>MEA Plant>Column Section>Moisture Removal Column C-1122:Outlet Flow(FIC11295)", "Label": "Outlet Flow", "Existing Widget Name": "Widget", "New widget Name": "Test 1", "Null": "Automation test KPI"}, {"Tag Name": "NEO>MEA Plant>Column Section>Moisture Removal Column C-1122:Bottom Temperature(TIC11225)", "Label": "Bottom Temperature", "Existing Widget Name": "Widget", "New widget Name": "Test 1", "Null": "Automation test KPI"}, {"Tag Name": "NEO>MEA Plant>Column Section>Moisture Removal Column C-1122:Middle Top Temperature(TIC11222)", "Label": "Middle Top Temp", "Existing Widget Name": "Widget", "New widget Name": "Test 1", "Null": "Automation test KPI"}, {"Tag Name": "NEO>MEA Plant>Column Section>Moisture Removal Column C-1122:Top Flow(FIC11269)", "Label": "Top Flow", "Existing Widget Name": "Widget", "New widget Name": "Test 1", "Null": "Automation test KPI"}, {"Tag Name": "NEO>MEA Plant>Column Section>Moisture Removal Column C-1122:Differential Pressure(PDI11223)", "Label": "Differential Pressure", "Existing Widget Name": "Widget", "New widget Name": "Test 1", "Null": "Automation test KPI"}, {"Tag Name": "NEO>66 TPH Boiler>Steam And Water System>Steam Prs Station 2PIC0303:Pressure(2PIC0303)", "Label": "Pressure", "Existing Widget Name": "Widget", "New widget Name": "Test 1", "Null": "Automation test KPI"}, {"Tag Name": "NEO>MEA Plant>Column Section>Vaccum Pump System S607:Current(S607_AMP)", "Label": "Current", "Existing Widget Name": "Widget2", "New widget Name": "Test 2", "Null": "Automation test KPI"}, {"Tag Name": "NEO>MEA Plant>Column Section>Vaccum Pump System S607:Pressure(PT-1)", "Label": "Pressure", "Existing Widget Name": "Widget2", "New widget Name": "Test 2", "Null": "Automation test KPI"}, {"Tag Name": "NEO>MEA 
Plant>Reaction Section>Catalyst Prepeartion Reactor R-1005:Inlet valve(HS10035)", "Label": "Inlet valve", "Existing Widget Name": "Widget2", "New widget Name": "Test 2", "Null": "Automation test KPI"}]
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment