Commit f8692e94 authored by harshavardhan.c

oee config services commit

parent 15ccf0a8
from dotenv import load_dotenv

load_dotenv()

import argparse
import gc

import uvicorn

from scripts.config import Service
from scripts.logging import logger

gc.collect()

if __name__ == "__main__":
    ap = argparse.ArgumentParser()
    ap.add_argument(
        "--port",
        "-p",
        required=False,
        default=Service.PORT,
        help="Port to start the application.",
    )
    ap.add_argument(
        "--bind",
        "-b",
        required=False,
        default=Service.HOST,
        help="IP to start the application.",
    )
    arguments = vars(ap.parse_args())
    logger.info(f"App Starting at {arguments['bind']}:{arguments['port']}")
    uvicorn.run("main:app", host=arguments["bind"], port=int(arguments["port"]))
# To specify a variable to be read from environment, use $<env_name>
[SERVICE]
host=0.0.0.0
port=6869
workers=1
allow_cross_origin = true
level=DEBUG
[POSTGRES]
uri=$OEE_POSTGRES_URI
downtime_uri=$DOWNTIME_URI
[MONGO]
uri=$MONGO_URI
[PATH_TO_SERVICES]
event_explorer=$EVENT_EXPLORER
metadata_proxy=$METADATA_PROXY
downtime_proxy=$DOWNTIME_PROXY
[DIRECTORY]
base_path = $BASE_PATH
keys_path = data/keys

[REDIS]
uri=$REDIS_URI
login_db = 9
project_tags_db = 18
downtime_db=30
oee_production_db=31
live_tags_db=12
[MQTT]
host = $MQTT_URL
port = $MQTT_PORT
publish_base_topic = ilens/notifications
[KAFKA]
host=$KAFKA_HOST
port=$KAFKA_PORT
topic=$KAFKA_TOPIC
enable_sites_partition=$ENABLE_KAFKA_PARTITION
split_key=$KAFKA_PARTITION_KEY
round_robin_enable=$ROUND_ROBIN_PARTITION
partition_db=13
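A minimal sketch of how the $<env_name> convention above could be resolved, assuming a ConfigParser-based loader; the real loader lives in scripts.config and is not part of this commit, and the conf path below is assumed:

import os
from configparser import BasicInterpolation, ConfigParser

class EnvInterpolation(BasicInterpolation):
    """Resolve option values of the form $<ENV_NAME> from the environment."""

    def before_get(self, parser, section, option, value, defaults):
        if value.startswith("$"):
            # Unset variables fall back to an empty string; config.py exits on the required ones.
            return os.environ.get(value[1:], "")
        return value

config = ConfigParser(interpolation=EnvInterpolation())
config.read("conf/application.conf")  # assumed path
print(config.get("SERVICE", "port"))  # -> 6869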
-----BEGIN RSA PRIVATE KEY-----
MIICWwIBAAKBgQClilTaeHq6Zc+kWHCNl1O0btGRm7ct3O5zqWx1mwwLUWH14eft
Hi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULfENhwd/D7P3mnoRlktPT2t+tt
RRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw2hcqOYe/NGTkmm1PswIDAQAB
AoGAZPARR1l5NBkKYGKQ1rU0E+wSmx+AtVVmjF39RUSyNmB8Q+poebwSgsr58IKt
T6Yq6Tjyl0UAZTGmferCK0xJJrqyP0hMn4nNNut+acWMKyt+9YrA2FO+r5Jb9JuT
SK35xXnM4aZLGppgWJxRzctpIz+qkf6oLRSZme0AuiqcwYECQQDY+QDL3wbWplRW
bze0DsZRMkDAkNY5OCydvjte4SR/mmAzsrpNrS5NztWbaaQrefoPbsdYBPbd8rS7
C/s/0L1zAkEAw1EC5zt2STuhkcKLa/tL+bk8WHHHtf19aC9kBj1TvWBFh+JojWCo
86iK5fLcHzhyQx5Qi3E9LG2HvOWhS1iUwQJAKbEHHyWW2c4SLJ2oVXf1UYrXeGkc
UNhjclgobl3StpZCYAy60cwyNo9E6l0NR7FjhG2j7lzd1t4ZLkvqFmQU0wJATLPe
yQIwBLh3Te+xoxlQD+Tvzuf3/v9qpWSfClhBL4jEJYYDeynvj6iry3whd91J+hPI
m8o/tNfay5L+UcGawQJAAtbqQc7qidFq+KQYLnv5gPRYlX/vNM+sWstUAqvWdMze
JYUoTHKgiXnSZ4mizI6/ovsBOMJTb6o1OJCKQtYylw==
-----END RSA PRIVATE KEY-----
-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQClilTaeHq6Zc+kWHCNl1O0btGR
m7ct3O5zqWx1mwwLUWH14eftHi5wIbOYh79JQ9BO2OA4UjPq31uwmJ96Okl0OULf
ENhwd/D7P3mnoRlktPT2t+ttRRrKvx3wNpOy/3nBsXnNt8EKxyA7k9vbqLbv9pGw
2hcqOYe/NGTkmm1PswIDAQAB
-----END PUBLIC KEY-----
import json
import os
from dataclasses import dataclass, field
from typing import Optional

from fastapi import Depends, FastAPI, Request, Response
from fastapi.middleware.cors import CORSMiddleware

from scripts.db.psql.create_default_tables import create_default_psql_dependencies
from scripts.services.calculate_oee import calc_oee_router
from scripts.services.form_services import form_router
from scripts.services.meta_services import meta_service_router
from scripts.services.ui_services import ui_service_router
from scripts.utils.security_utils.decorators import CookieAuthentication

auth = CookieAuthentication()


@dataclass
class FastAPIConfig:
    title: str = "OEE SERVICES"
    description: str = "A comprehensive tool for calculating OEE"
    docs_url: str = os.environ.get("SW_DOCS_URL")
    redoc_url: str = field(default=None)
    # root_path: str = os.environ.get("MODULE_PROXY", default="/d_oee")
    openapi_url: Optional[str] = os.environ.get("SW_OPENAPI_URL")


# os.environ.get returns a string (or None), so compare against the string forms only
SECURE_ACCESS = os.environ.get("SECURE_ACCESS") in {"true", "True"}
if SECURE_ACCESS:
    app_config_dict = FastAPIConfig().__dict__
    app_config_dict.update({"dependencies": [Depends(auth)]})
    app = FastAPI(**app_config_dict)
else:
    app = FastAPI(**FastAPIConfig().__dict__)

if os.environ.get("ENABLE_CORS") in ("true", "True") and os.environ.get("CORS_URLS"):
    app.add_middleware(
        CORSMiddleware,
        allow_origins=os.environ.get("CORS_URLS").split(","),
        allow_credentials=True,
        allow_methods=["GET", "POST", "DELETE", "PUT"],
        allow_headers=["*"],
    )


@app.middleware("http")
async def add_process_time_header(request: Request, call_next):
    cookies = request.cookies
    response: Response = await call_next(request)
    response.headers.append("Cookie", json.dumps(cookies))
    return response


app.include_router(calc_oee_router)
app.include_router(ui_service_router)
app.include_router(meta_service_router)
app.include_router(form_router)


@app.on_event("startup")
async def startup_event():
    """Create the default database and tables on startup."""
    create_default_psql_dependencies()
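A quick way to see the cookie-echo middleware above in action, assuming FastAPI's bundled TestClient; this smoke test is illustrative and not part of the commit (the path is hypothetical):

from fastapi.testclient import TestClient

from main import app  # assumes the module above is main.py

client = TestClient(app)
resp = client.get("/api/ui/health", cookies={"login-token": "abc"})  # hypothetical path
# Every response carries the request cookies back as a JSON-encoded "Cookie" header.
print(resp.headers.get("Cookie"))  # -> {"login-token": "abc"}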
import json
import time
from datetime import datetime
from typing import List
import pytz
from scripts.config import read_settings, DBConf
from scripts.constants import OEETagMappingKeys
from scripts.core.engine.automation_engine import AutomationEngine
from scripts.db.mongo.dbs.siam_oee import SiamOEE
from scripts.db.redis_connections import live_tags_db
from scripts.logging import logger
from scripts.utils.common_utils import CommonUtils
from scripts.utils.db_name_util import get_project_specific_key
class ProductionMonitor:
def __init__(self, project_id=None):
self.automation_engine = AutomationEngine()
self.settings = read_settings()
self.oee_mongo = SiamOEE()
self.machine_mode_tags = self.settings["automation"]["setup_time_logic"]["tags"]
self.manual_mode_tag = self.settings["automation"]["setup_time_logic"]["manual_mode_tag"]
self.auto_mode_tag = self.settings["automation"]["setup_time_logic"]["auto_mode_tag"]
self.production_count_tags = self.settings["automation"]["production_end_logic"]["tags"]
self.good_count_tag = self.settings["automation"]["production_end_logic"]["good_count_tag"]
self.inspection_count_tag = self.settings["automation"]["production_end_logic"]["inspection_count_tag"]
self.ng_count_tag = self.settings["automation"]["production_end_logic"]["ng_count_tag"]
self.common_util = CommonUtils()
@staticmethod
def get_redis_data(tags_list: List, project_id: str):
tag_data = {}
redis_project_prefix = get_project_specific_key(project_id=project_id)
updated_tag_list = [f'{redis_project_prefix}{_tag}' for _tag in tags_list]
redis_response = live_tags_db.mget(updated_tag_list)
# mget returns one value per requested key, so pair keys and values directly
tag_data.update(zip(updated_tag_list, redis_response))
return tag_data
@staticmethod
def calculate_setup_time(production_start_time: datetime, tz, data):
while True:
try:
if (datetime.now(tz=pytz.timezone(tz)) - production_start_time).total_seconds() > 600:
return round((datetime.now(tz=pytz.timezone(tz)) - production_start_time).total_seconds() / 60)
if data.get(OEETagMappingKeys.OEE_MANUAL_MODE_TAG) == 0 and data.get(
OEETagMappingKeys.OEE_AUTO_MODE_TAG) == 1:
print("production started!!!")
return round((datetime.now(tz=pytz.timezone(tz)) - production_start_time).total_seconds() / 60)
time.sleep(1)
except Exception as e:
print(e)
def check_mongo_for_finish(self, job_id, machine_type) -> bool:
data = self.oee_mongo.find_record(job_id, machine_type)
return data.get("prod_status", "") == "completed"
def check_production_end(self, data):
total_count = data.get("actual_received_qty", data.get("qty_released"))
if total_count <= 0:
total_count = data.get("qty_released")
job_id = data.get("job", "")
machine_type = data.get("uf_process", "")
tag_list = [f"{DBConf.REDIS_PREFIX}{i}" for i in self.production_count_tags]
while True:
if self.check_mongo_for_finish(job_id, machine_type):
break
try:
tag_data = self.get_redis_data(tag_list)
if tag_data.get(self.good_count_tag, 0) >= total_count \
or tag_data.get(self.inspection_count_tag, 0) >= total_count \
or tag_data.get(self.inspection_count_tag, -1) == 0 \
or tag_data.get(self.good_count_tag, -1) == 0:
print("production ended")
break
time.sleep(1)
except Exception as e:
print(e)
return True
def calculate_oee_params(self, data, tz):
try:
start_time = datetime.fromtimestamp(
data.get("prod_start_time") // 1000, tz=pytz.timezone(tz))
end_time = datetime.now(tz=pytz.timezone(tz))
available_time = (end_time - start_time).total_seconds() / 60
# if downtime > available_time:
downtime = self.common_util.get_downtime_details_by_hierarchy(hierarchy=data["hierarchy"],
project_id=data["project_id"],
user_id=data.get("trigger_by"))
operating_time = available_time - downtime
oee_tags_list = data.get("oee_tags_list")
project_id = data.get("project_id")
if not oee_tags_list:
return
tags_data = self.get_redis_data(tags_list=oee_tags_list, project_id=project_id)
good_count, units_produced = self.get_current_produced_count(input_data=tags_data)
if not good_count:
good_count = 0
if not units_produced:
units_produced = 0
productive_time = units_produced * (1 / data.get("cycle_time"))
performance = productive_time / operating_time
availability = operating_time / available_time
if units_produced:
quality = good_count / units_produced
else:
quality = 0
oee = availability * performance * quality
return oee * 100, availability * 100, performance * 100, quality * 100
except Exception as e:
logger.exception(f"Exception occurred while updating the production batch {e.args[0]}")
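For reference, the arithmetic above is the standard OEE decomposition (OEE = availability x performance x quality); a small worked example with assumed numbers:

available_time = 480.0        # minutes between prod_start_time and now
downtime = 60.0               # minutes reported by the downtime service
operating_time = available_time - downtime            # 420.0

cycle_time = 2.0              # ideal units per minute, matching the 1 / cycle_time above
units_produced = 700
productive_time = units_produced * (1 / cycle_time)   # 350.0 minutes of ideal runtime

availability = operating_time / available_time        # 0.875
performance = productive_time / operating_time        # ~0.833
good_count = 665
quality = good_count / units_produced                 # 0.95

oee = availability * performance * quality            # ~0.693 -> reported as ~69.3%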
@staticmethod
def check_production_run(input_data: dict):
while True:
try:
if input_data.get(OEETagMappingKeys.OEE_INSPECTION_TAG, 0) > 0:
return True
time.sleep(1)
except Exception as e:
print(e)
@staticmethod
def get_current_produced_count(input_data: dict):
try:
return (
input_data.get(OEETagMappingKeys.OEE_GOOD_COUNT_TAG),
input_data.get(OEETagMappingKeys.OEE_NG_TAG, 0) + input_data.get(OEETagMappingKeys.OEE_GOOD_COUNT_TAG, 0)
)
except Exception as e:
print(e)
cryptography~=36.0.1
crypto~=1.4.1
fastapi==0.75.2
httpx~=0.19.0
ilens-kafka-publisher==0.4.2
kafka-python~=2.0.2
pandas==1.4.2
psycopg2-binary~=2.9.3
pydantic==1.9.0
PyJWT==2.3.0
pytz==2022.1
pymongo~=3.7.2
pymssql~=2.2.4
pytest==3.2.4
python-dotenv~=0.19.2
pyyaml~=5.1
redis==4.2.2
shortuuid==1.0.8
SQLAlchemy==1.4.35
SQLAlchemy-Utils==0.38.2
uvicorn~=0.17.5
argparse~=1.4.0
DateTime~=4.3
paho-mqtt~=1.5.0
pendulum~=2.1.2
requests~=2.27.1
aiohttp~=3.8.1
numpy==1.22.3
class Service:
HOST = config.get("SERVICE", "host")
PORT = config.getint("SERVICE", "port")
WORKERS = config.getint("SERVICE", "workers")
secure_cookie = config.get("SERVICE", "secure_cookie")
class DBConf:
POSTGRES_URI = config.get('POSTGRES', 'uri')
if not POSTGRES_URI:
print("Error, environment variable OEE_POSTGRES_URI not set")
sys.exit(1)
DOWNTIME_URI = config.get('POSTGRES', 'downtime_uri')
if not DOWNTIME_URI:
print("Error, environment variable DOWNTIME_URI not set")
sys.exit(1)
MONGO_URI = config.get('MONGO', 'uri')
if not MONGO_URI:
print("Error, environment variable MONGO_URI not set")
sys.exit(1)
class RedisConfig(object):
REDIS_URI = config.get("REDIS", "uri")
LOGIN_DB = config["REDIS"]["login_db"]
PROJECT_TAGS_DB = config.getint("REDIS", "project_tags_db")
DOWNTIME_DB = config.getint("REDIS", "downtime_db")
OEE_PRODUCTION_DB = config.getint("REDIS", "oee_production_db")
LIVE_TAGS_DB = config.getint("REDIS", "live_tags_db")
class KeyPath(object):
class MQTTConf:
host = config["MQTT"]["host"]
port = int(config["MQTT"]["port"])
publish_base_topic = config["MQTT"]["publish_base_topic"]
class KafkaConf:
host = config.get('KAFKA', 'host')
port = config.get('KAFKA', 'port')
topic = config.get('KAFKA', 'topic')
if not all([topic, host, port]):
print("KAFKA env variables missing, continuing without Kafka/Kairos support")
enable_sites_partition = config.getboolean("KAFKA", "enable_sites_partition", fallback=True)
split_key = config["KAFKA"].get('split_key', 'site_id')
round_robin_enable = config.getboolean("KAFKA", "round_robin_enable", fallback=True)
redis_db = config.getint("KAFKA", "partition_db")
class PathToServices:
event_explorer = config["PATH_TO_SERVICES"]["event_explorer"]
metadata_proxy = config["PATH_TO_SERVICES"]["metadata_proxy"]
downtime_proxy = config["PATH_TO_SERVICES"]["downtime_proxy"]
import os
from collections import namedtuple
class DBConstants:
db_ilens_assistant = "ilens_assistant"
collection_constants = "constants"
lookup_table = "lookup_table"
class CommonConstants:
ui = 'ui_datetime_format'
utc = 'utc_datetime_format'
nsc = 'no_special_chars_datetime_format'
umtf = 'user_meta_time_format'
__temporary_format__ = '%Y-%m-%dT%H:%M:%S+0530'
__iso_format__ = '%Y-%m-%dT%H:%M:%S%z'
__form_iso_format__ = '%Y-%m-%dT%H:%M:%S.%f%z'
__utc_datetime_format__ = '%Y-%m-%dT%H:%M:%S'
__ui_datetime_format__ = '%Y-%m-%d %H:%M:%S'
__no_special_chars_datetime_format__ = '%Y%m%d%H%M%S'
__user_meta_time_format__ = "%d-%m-%Y %H:%M:%S"
__user_meta_time_format_ws__ = "%d-%m-%Y %H:%M"
__ui_display_datetime_format__ = "%d %b %Y %H:%M"
__ui_display_date_format__ = "%d %b %Y"
__time_format__ = "%H:%M:%S"
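For example, the UI format above produces the timestamp shape the request schemas in this commit expect for prod_start_time:

from datetime import datetime

# '%Y-%m-%d %H:%M:%S' -> '2022-04-22 19:49:00'
print(datetime(2022, 4, 22, 19, 49, 0).strftime(CommonConstants.__ui_datetime_format__))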
class OEETagMappingKeys:
OEE_GOOD_COUNT_TAG = os.environ.get("OEE_GOOD_TAG", default="good_count_tag")
OEE_INSPECTION_TAG = os.environ.get("OEE_INSPECTION_TAG", default="oee_inspection_tag")
OEE_MANUAL_MODE_TAG = os.environ.get("OEE_MANUAL_MODE_TAG", default="oee_manual_mode_tag")
OEE_AUTO_MODE_TAG = os.environ.get("OEE_AUTO_MODE_TAG", default="oee_auto_mode_tag")
OEE_NG_TAG = os.environ.get("OEE_NG_TAG", default="oee_ng_tag")
class CommonStatusCode:
SUCCESS_CODES = (
200,
201,
204,
)
from scripts.db.redis_connections import oee_production_db
class AutomationEngine:
def __init__(self, project_id=None):
...
@staticmethod
def get_activate_batches_from_redis():
final_dict = {}
keys = list(oee_production_db.keys())
values = oee_production_db.mget(keys)
# mget returns one value per key, so zip the two lists once
final_dict.update(zip(keys, values))
return final_dict
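A sketch of consuming the returned mapping, assuming the {project_id}${reference_id} key scheme used by the batch handler elsewhere in this commit (values are made up):

active = AutomationEngine.get_activate_batches_from_redis()
# e.g. {"project_099$job_001": '{"created_on": "2022-04-22 19:49:00"}'}
for key, raw_value in active.items():
    project_id, reference_id = key.split("$", 1)
    print(project_id, reference_id, raw_value)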
import pandas as pd

from scripts.schemas.batch_oee import ChartResponse, ChartDBResponse
def processor(data):
db_response = ChartDBResponse(**data)
db_response.total_time = (
db_response.batch_end_time - db_response.batch_start_time
) / 60000
db_response.actual_cycle = round(
db_response.total_units / db_response.total_time, 2
)
return chart_response.dict()
def aggregator(data, activity_length=1):
df = pd.DataFrame(data)
df["total_time"] = (df["batch_end_time"] - df["batch_start_time"]) / 60000
df["actual_cycle"] = df["total_units"] / df["total_time"]
from datetime import datetime

import pytz

from scripts.constants import UOM
from scripts.errors import ErrorCodes
from scripts.logging import logger
from scripts.schemas.batch_oee import OEEDataSaveRequest, BatchOEEData
from scripts.utils.common_utils import CommonUtils
class OEECalculator:
def __init__(self):
self.common_utils = CommonUtils()
@staticmethod
def calculate_availability(operating_time, planned_prod_time):
if operating_time > planned_prod_time:
logger.error(ErrorCodes.ERR001)
raise ValueError(ErrorCodes.ERR001)
raise
@staticmethod
def calculate_performance(units_produced, cycle_time, operating_time):
try:
if cycle_time == 0 or operating_time == 0:
logger.error(ErrorCodes.ERR002)
raise
@staticmethod
def calculate_productive_time(units_produced, cycle_time):
try:
if cycle_time == 0:
logger.error(ErrorCodes.ERR002)
raise
@staticmethod
def calculate_quality(rejected_units, total_units):
if rejected_units > total_units:
logger.error(ErrorCodes.ERR004)
raise ValueError(ErrorCodes.ERR004)
raise
@staticmethod
def calculate_oee(availability, performance, quality):
try:
return availability * performance * quality
except Exception as e:
class OEELossesCalculator:
@staticmethod
def calculate_availability_loss(downtime, available_time):
return (downtime / available_time) * 100
@staticmethod
def calculate_quality_loss(reject_units, cycle_time, available_time):
return ((reject_units * (1 / cycle_time)) / available_time) * 100
@staticmethod
def calculate_performance_loss(
oee_percentage, availability_loss, quality_loss
):
return 100 - availability_loss - quality_loss - oee_percentage
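The three losses plus the OEE percentage are designed to partition 100% of planned production time; continuing the assumed numbers from the worked example above:

downtime, available_time = 60.0, 480.0
reject_units, cycle_time = 35, 2.0
oee_percentage = 69.27

availability_loss = (downtime / available_time) * 100                        # 12.5
quality_loss = ((reject_units * (1 / cycle_time)) / available_time) * 100    # ~3.65
performance_loss = 100 - availability_loss - quality_loss - oee_percentage   # ~14.58

assert round(oee_percentage + availability_loss + quality_loss + performance_loss) == 100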
class OEEEngine:
def __init__(self):
self.oee_calc = OEECalculator()
self.oee_loss_calc = OEELossesCalculator()
self.common_util = CommonUtils()
def start_batch_oee_calc(
self,
request_data: OEEDataSaveRequest
) -> BatchOEEData:
try:
logger.debug(f"Calculating OEE for {request_data.reference_id}")
# Start and End time should be in milliseconds since epoch.
if request_data.uom == UOM.minutes:
divisor = UOM.time_divs.minutes
elif request_data.uom == UOM.seconds:
divisor = UOM.time_divs.seconds
elif request_data.uom == UOM.hours:
divisor = UOM.time_divs.hours
elif request_data.uom == UOM.millis:
divisor = UOM.time_divs.millis
else:
divisor = UOM.time_divs.minutes
planned_production_time = self.common_util.get_duration(tz=request_data.tz, meta=request_data.dict(),
difference=True) / divisor
operating_time = planned_production_time - request_data.downtime
availability = self.oee_calc.calculate_availability(
operating_time=operating_time,
planned_prod_time=planned_production_time,
)
performance = self.oee_calc.calculate_performance(
units_produced=request_data.total_units,
operating_time=operating_time,
cycle_time=request_data.cycle_time,
)
quality = self.oee_calc.calculate_quality(
total_units=request_data.total_units,
rejected_units=request_data.reject_units,
)
oee = self.oee_calc.calculate_oee(
availability=availability,
performance=performance,
quality=quality,
)
productive_time = self.oee_calc.calculate_productive_time(
cycle_time=request_data.cycle_time,
units_produced=request_data.total_units,
)
availability_loss = self.oee_loss_calc.calculate_availability_loss(
downtime=request_data.downtime,
available_time=planned_production_time,
)
quality_loss = self.oee_loss_calc.calculate_quality_loss(
reject_units=request_data.reject_units,
available_time=planned_production_time,
cycle_time=request_data.cycle_time,
)
performance_loss = self.oee_loss_calc.calculate_performance_loss(
oee_percentage=oee * 100,
availability_loss=availability_loss,
quality_loss=quality_loss,
"performance_loss": performance_loss,
}
logger.debug(f"OEE: {request_data.reference_id}: {oee_dict}")
logger.debug(f"OEE Loss: {request_data.reference_id}: {oee_loss}")
batch_oee = BatchOEEData(
**request_data.dict(),
# request_data.tz is a zone name string, so build a tzinfo with pytz
calculated_on=datetime.now(tz=pytz.timezone(request_data.tz)).isoformat(),
productive_time=productive_time,
availability=availability * 100,
performance=performance * 100,
import time
from sqlalchemy.orm import Session
from scripts.core.engine.chart_creators import ChartMaker
from scripts.core.engine.oee_aggregator import processor, aggregator
from scripts.db.psql.oee_discrete import DiscreteOEE
from scripts.logging import logger
from scripts.schemas.batch_oee import (
GetOEERequest,
GetOEERequestOneBatch,
ChartRequest,
)
from scripts.schemas.meta import LabelValue
from scripts.utils.mongo_util import MongoConnect
class APIHandler:
@staticmethod
def get_oee_all(db: Session, get_oee_request: GetOEERequest):
table_obj = DiscreteOEE(db=db)
try:
data = table_obj.get_oee_data_all(
prod_start_time=get_oee_request.prod_start_time,
prod_end_time=get_oee_request.prod_end_time,
hierarchy=get_oee_request.hierarchy,
)
return data
except Exception:
raise
finally:
del table_obj
@staticmethod
def get_oee_batch(db: Session, get_oee_request: GetOEERequestOneBatch):
table_obj = DiscreteOEE(db=db)
try:
data = table_obj.get_oee_data_by_reference_id(
hierarchy=get_oee_request.hierarchy, reference_id=get_oee_request.reference_id,
project_id=get_oee_request.project_id
)
return data
except Exception:
raise
finally:
del table_obj
@staticmethod
def get_batches(db: Session, request_data: GetBatches):
table_obj = DiscreteOEE(db=db)
try:
if not request_data.prod_end_time:
request_data.prod_end_time = int(time.time() * 1000)
data = table_obj.get_batches(
hierarchy=request_data.hierarchy,
prod_start_time=request_data.prod_start_time,
prod_end_time=request_data.prod_end_time,
)
return data
except Exception:
raise
finally:
del table_obj
@staticmethod
async def get_products(db: Session, request_data: GetProducts):
table_obj = DiscreteOEE(db=db)
try:
data = table_obj.get_products(
hierarchy=request_data.hierarchy,
prod_start_time=request_data.queryDate[0],
prod_end_time=request_data.queryDate[1],
)
return [
except Exception as e:
logger.exception(e, exc_info=True)
raise
finally:
del table_obj
@staticmethod
async def get_chart_data(db: Session, request_data: ChartRequest):
table_obj = DiscreteOEE(db=db)
try:
if not request_data.hierarchy:
return dict()
chart_maker = ChartMaker()
data = table_obj.get_chart_data(
hierarchy=request_data.hierarchy,
prod_start_time=request_data.queryDate[0],
prod_end_time=request_data.queryDate[1],
reference_id=request_data.productId,
aggregation=request_data.aggregation,
)
if not request_data.aggregation or len(data) == 1:
import json
from datetime import datetime
import pytz
from sqlalchemy.orm import Session
from scripts.constants import ResponseCodes, CommonConstants
from scripts.core.engine.oee_calculator import OEEEngine
from scripts.db.psql.oee_discrete import DiscreteOEE
from scripts.errors import ILensError, ErrorCodes
from scripts.db.redis_connections import oee_production_db
from scripts.logging import logger
from scripts.schemas.batch_oee import OEEDataInsertRequest, BatchOEEData, OEEDataSaveRequest
from scripts.schemas.response_models import DefaultResponse
oee_engine = OEEEngine()
class CalculateBatchOEEHandler:
def calculate_oee(self, db, request_data: OEEDataInsertRequest):
table_obj = DiscreteOEE(db=db)
try:
record_presence = table_obj.get_oee_data_by_reference_id(reference_id=request_data.reference_id,
hierarchy=request_data.hierarchy,
project_id=request_data.project_id)
redis_key = f"{request_data.project_id}${request_data.reference_id}"
if not record_presence:
if not request_data.prod_start_time:
request_data.prod_start_time = datetime.now().astimezone(
tz=pytz.timezone(request_data.tz)).isoformat()
request_data = OEEDataSaveRequest(**request_data.dict(exclude_none=True))
oee_calculation = oee_engine.start_batch_oee_calc(request_data=request_data)
self.save_oee_data(oee_calculation, db)
oee_production_db.set(name=redis_key, value=json.dumps(
{"created_on": datetime.now().strftime(CommonConstants.__ui_datetime_format__)}))
response = DefaultResponse(
status=ResponseCodes.SUCCESS,
data=oee_calculation,
message="OEE saved Successfully",
)
return response
status = self.update_oee_data(oee_data=BatchOEEData(**request_data.dict(exclude_none=True)),
old_record=record_presence, db=db)
if status:
if request_data.prod_end_time:
oee_production_db.delete(redis_key)
else:
# pydantic models are not JSON-serializable via json.dumps; use the model's .json()
oee_production_db.set(name=redis_key,
value=BatchOEEData(**request_data.dict(exclude_none=True)).json())
response = DefaultResponse(
status=ResponseCodes.SUCCESS,
data=status,
except Exception as e:
logger.exception(f"Exception while saving oee record: {e}")
raise e
finally:
del table_obj
@staticmethod
def save_oee_data(oee_data: BatchOEEData, db: Session):
table_obj = DiscreteOEE(db=db)
try:
table_obj.insert_one(table=table_obj.table, insert_json=oee_data.dict())
return True
except Exception as e:
raise e
finally:
del table_obj
@staticmethod
def update_oee_data(oee_data: BatchOEEData, old_record: dict, db: Session):
table_obj = DiscreteOEE(db=db)
try:
old_record.update(**oee_data.dict(exclude_none=True))
oee_calculation = oee_engine.start_batch_oee_calc(
request_data=OEEDataSaveRequest(**old_record))
filters = [
{"expression": "eq", "column": table_obj.table.project_id, "value": oee_data.project_id},
{"expression": "eq", "column": table_obj.table.reference_id, "value": oee_data.reference_id},
{"expression": "eq", "column": table_obj.table.hierarchy, "value": oee_data.hierarchy},
]
table_obj.update(update_json=oee_calculation.dict(), table=table_obj.table, filters=filters, update_one=True)
return True
except Exception as e:
raise e
finally:
del table_obj
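The handler keys active production runs in Redis as {project_id}${reference_id}; a short sketch of that lifecycle with assumed ids:

import json

redis_key = f"{'project_099'}${'job_001'}"  # hypothetical project and reference ids
start_marker = json.dumps({"created_on": "2022-04-22 19:49:00"})
# first calculation: oee_production_db.set(name=redis_key, value=start_marker)
# later updates: the serialized BatchOEEData replaces the value
# once prod_end_time is reported: oee_production_db.delete(redis_key)
print(redis_key)  # -> project_099$job_001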
from sqlalchemy import create_engine
from scripts.config import DBConf
from scripts.config import Metadata
from scripts.core.engine.automation_engine import AutomationEngine
from scripts.core.handlers.batch_oee_calc_handler import CalculateBatchOEEHandler, OEEDataInsertRequest
from scripts.db.mongo.dbs.siam_oee import SiamOEE
from scripts.db_layer.job_table import JobTable
from scripts.logging.logging import logger as logging
table_data = JobTable(db_session)
job_model, db_data = await self.get_job_data(request_data)
table_data.update_record(job_model.job, job_model.uf_process, db_data.dict(exclude_none=True))
calculate_oee_payload = CalculateOEE(batch_start_time=job_model.prod_start_time,
batch_end_time=job_model.prod_end_time,
batch_id=job_model.job,
setup_time=job_model.setup_time,
cycle_time=job_model.cycle_time,
total_units=job_model.qty_released)
calculate_oee_payload.downtime = await self.get_oee_downtime(request_data.submitted_data["data"],
job_model.prod_end_time, job_model.tz)
_ = CalculateBatchOEEHandler().calculate_oee(db_session, OEEDataInsertRequest(
**calculate_oee_payload.dict()))
form_response = await self.save_to_form(request_data, request_cookies, job_model)
logging.info(f"FORM SAVE RESPONSE, {form_response}")
async def save_to_form(self, request_data: EndProductionRequest,
request_cookies,
job_model: EndProdJobModel):
end_date_time = datetime.fromtimestamp(job_model.prod_end_time // 1000, tz=pytz.timezone(request_data.tz))
end_str = end_date_time.strftime("%Y-%m-%d %H:%M")
start_date_time = datetime.fromtimestamp(job_model.prod_start_time // 1000, tz=pytz.timezone(request_data.tz))
start_str = start_date_time.strftime("%Y-%m-%d %H:%M")
tag_data = self.automation_engine.get_all_tags(end_date_time)
# Copyright (c) NOIR
from sqlalchemy import Column, Integer, String, Float, TIMESTAMP
from scripts.db.psql.databases import Base
class OEEDiscreteTable(Base):
id = Column(Integer, autoincrement=True, primary_key=True)
hierarchy = Column(String)
process = Column(String, nullable=True)
prod_status = Column(String, default="running")
reference_id = Column(String, nullable=True, unique=True)
prod_start_time = Column(TIMESTAMP(timezone=True), nullable=False)
prod_end_time = Column(TIMESTAMP(timezone=True), nullable=True)
total_downtime = Column(Float, default=0)
planned_units = Column(Float, nullable=False)
total_units = Column(Float)
reject_units = Column(Float, default=0)
cycle_time = Column(Float)
productive_time = Column(Float, default=0)
availability = Column(Float)
performance = Column(Float)
quality_loss = Column(Float)
performance_loss = Column(Float)
oee = Column(Float)
calculated_on = Column(TIMESTAMP(timezone=True), nullable=False)
uom = Column(String, default="mins")
setup_time = Column(Float, default=0)
trigger_by = Column(String, nullable=False)
project_id = Column(String, nullable=False)
from typing import Any, Optional, List
from scripts.constants import CommonKeys
from scripts.constants import DBConstants
from scripts.db.mongo.schema import MongoBaseSchema
from scripts.utils.mongo_util import MongoCollectionBaseClass
class LookupTableSchema(MongoBaseSchema):
"""
This is the Schema for the Mongo DB Collection.
All datastore and general responses will be following the schema.
"""
lookup_id: Optional[str]
lookup_name: Optional[str]
lookup_data: Optional[List]
project_id: Optional[Any]
description: Optional[str]
class LookupTable(MongoCollectionBaseClass):
def __init__(self, mongo_client, project_id=None):
super().__init__(mongo_client, database=DBConstants.db_metadata,
collection=DBConstants.lookup_table)
self.project_id = project_id
@property
def key_type(self):
return CommonKeys.KEY_TYPE
def find_constant_by_dict(self, _type):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
:param _type:
:return:
"""
record = self.find_one(query={self.key_type: _type})
if not record:
return dict()
return dict(record)
def find_lookup_dict(self, lookup_name, project_id, filter_dict=None):
query = {self.key_type: lookup_name, "project_id": project_id}
record = self.find_one(query=query, filter_dict=filter_dict)
if not record:
return dict()
return dict(record)
def map_lookup_keys(self, lookup_name, project_id):
query = {self.key_type: lookup_name, "project_id": project_id}
_record = self.find_one(query=query)
if not _record:
return dict()
return {record["lookupdata_id"]: record["lookup_value"] for record in _record["lookup_data"]}
def insert_one_constant(self, data):
"""
The following function will insert one tag in the
tags collections
:param self:
:param data:
:return:
"""
return self.insert_one(data)
def find_constant_by_content(self, content_type):
"""
The following function will give one record for a given set of
search parameters as keyword arguments
"""
query = {"content_type": content_type}
search_option = {"data": 1}
record = self.find_one(query=query, filter_dict=search_option)
if not record:
return dict()
return record
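Typical calls against the collection, assuming a connected mongo_client and an existing 'machine_types' lookup (both assumed):

lookup = LookupTable(mongo_client, project_id="project_099")  # mongo_client assumed
machine_lookup = lookup.find_lookup_dict("machine_types", project_id="project_099")
id_to_value = lookup.map_lookup_keys("machine_types", project_id="project_099")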
from typing import Optional, List
from pydantic import BaseModel
class MongoBaseSchema(BaseModel):
pass
class BaseRequestSchema(BaseModel):
"""
This is base schema for input requests to the Collection Class
"""
from sqlalchemy import create_engine
from sqlalchemy_utils import database_exists, create_database
from scripts.config import DBConf
from scripts.db.db_models import OEEDiscreteTable
from scripts.logging import logger
engine = create_engine(DBConf.POSTGRES_URI)
def create_default_psql_dependencies():
try:
if not database_exists(engine.url):
create_database(engine.url)
OEEDiscreteTable.__table__.create(bind=engine, checkfirst=True)
except Exception as e:
logger.error(f"Error occurred while creating: {e}", exc_info=True)
class DiscreteOEE(SQLDBUtils):
def __init__(self, db):
super().__init__(db)
self.table = OEEDiscreteTable
def get_oee_data_all(self, prod_start_time, prod_end_time, hierarchy):
try:
data = (
.order_by(self.table.calculated_on)
.filter(
self.table.hierarchy == hierarchy,
self.table.calculated_on >= prod_start_time,
self.table.calculated_on <= prod_end_time,
)
)
if data:
logger.exception(e)
raise
def get_oee_data_by_reference_id(self, reference_id, hierarchy, project_id):
try:
data = (
self.session.query(self.table)
.order_by(self.table.calculated_on)
.filter(
self.table.hierarchy == hierarchy, self.table.reference_id == reference_id,
self.table.project_id == project_id
)
.first()
)
logger.exception(e)
raise
def get_batches(self, hierarchy, prod_start_time, prod_end_time):
try:
data = (
self.session.query(self.table.reference_id)
.order_by(self.table.calculated_on)
.filter(
self.table.hierarchy == hierarchy,
self.table.calculated_on >= prod_start_time,
self.table.calculated_on <= prod_end_time,
)
)
if data:
return [i.reference_id for i in data]
else:
return list()
except Exception as e:
logger.exception(e)
raise
def get_products(self, hierarchy, prod_start_time, prod_end_time):
try:
data = (
self.session.query(
.order_by(self.table.calculated_on)
.filter(
self.table.hierarchy == hierarchy,
self.table.prod_start_time >= prod_start_time,
self.table.prod_end_time <= prod_end_time,
)
)
if data:
raise
def get_chart_data(
self, prod_start_time, prod_end_time, hierarchy, reference_id, aggregation=False
):
try:
if not aggregation:
self.session.query(self.table)
.filter(
self.table.hierarchy == hierarchy,
self.table.reference_id == reference_id,
self.table.prod_start_time >= prod_start_time,
self.table.prod_end_time <= prod_end_time,
)
.first()
)
self.session.query(self.table)
.filter(
self.table.hierarchy == hierarchy,
self.table.prod_start_time >= prod_start_time,
self.table.prod_end_time <= prod_end_time,
)
.options(
defer(self.table.hierarchy),
import redis

from scripts.config import RedisConfig, KafkaConf

login_db = redis.from_url(RedisConfig.REDIS_URI, db=int(RedisConfig.LOGIN_DB), decode_responses=True)
project_details_db = redis.from_url(RedisConfig.REDIS_URI, db=int(RedisConfig.PROJECT_TAGS_DB), decode_responses=True)
downtime_db = redis.from_url(RedisConfig.REDIS_URI, db=int(RedisConfig.DOWNTIME_DB), decode_responses=True)
oee_production_db = redis.from_url(RedisConfig.REDIS_URI, db=int(RedisConfig.OEE_PRODUCTION_DB), decode_responses=True)
live_tags_db = redis.from_url(RedisConfig.REDIS_URI, db=int(RedisConfig.LIVE_TAGS_DB), decode_responses=True)
partition_db = redis.from_url(RedisConfig.REDIS_URI, db=int(KafkaConf.redis_db), decode_responses=True)
from typing import Optional, Union, List
from pydantic import BaseModel, validator
# from scripts.utils.common_utils import CommonUtils
#
# common_utils = CommonUtils()
class GetProducts(BaseModel):
queryDate: List[int]
class ProductInfo(BaseModel):
reject_units: Optional[Union[float, int]] = 0
class OEEDataInsertRequest(BaseModel):
prod_start_time: Optional[str]
prod_end_time: Optional[str]
prod_status: Optional[str] = "running"
downtime: Optional[Union[float, int]]
hierarchy: Optional[str]
reference_id: Optional[str]
setup_time: Optional[Union[float, int]]
cycle_time: Optional[Union[float, int]]
total_units: Optional[Union[float, int]]
reject_units: Optional[Union[float, int]]
tz: Optional[str] = 'Asia/Kolkata'
project_id: str
uom: Optional[str]
class Config:
schema_extra = {
"example": {
"hierarchy": "site_100$dept_100$line_100$equipment_100",
"prod_start_time": "2022-04-22 19:49:00",
"prod_end_time": "2022-04-22 19:49:00",
"tz": "Asia/Kolkata",
"project_id": "project_099",
"reference_id": "reference_id",
"downtime": "",
"setup_time": "",
"cycle_time": "",
"total_units": "",
"reject_units": "",
"uom": "mins"
}
}
# @validator('prod_start_time')
# def date_format_validator_start_date(cls, v):
# common_utils.check_date_format(v, "%Y-%m-%d %H:%M:%S")
# return v
#
# @validator('prod_end_time')
# def date_format_validator_end_date(cls, v):
# common_utils.check_date_format(v, "%Y-%m-%d %H:%M:%S")
# return v
class OEEDataSaveRequest(BaseModel):
prod_start_time: Optional[str]
prod_end_time: Optional[str]
downtime: Optional[Union[float, int]] = 0
hierarchy: Optional[str]
reference_id: Optional[str]
setup_time: Optional[Union[float, int]] = 0
cycle_time: Union[float, int] = 3
total_units: Optional[Union[float, int]] = 0
reject_units: Optional[Union[float, int]] = 0
uom: Optional[str] = "mins"
tz: Optional[str] = 'Asia/Kolkata'
project_id: str
class Config:
schema_extra = {
"example": {
"hierarchy": "site_100$dept_100$line_100$equipment_100",
"prod_start_time": "2022-04-22 19:49:00",
"prod_end_time": "2022-04-22 19:49:00",
"tz": "Asia/Kolkata",
"project_id": "project_099",
"reference_id": "reference_id",
"downtime": "",
"setup_time": "",
"cycle_time": "",
"total_units": "",
"reject_units": "",
"uom": "mins"
}
}
# @validator('prod_start_time')
# def date_format_validator_start_date(cls, v):
# common_utils.check_date_format(v, "%Y-%m-%d %H:%M:%S")
# return v
#
# @validator('prod_end_time')
# def date_format_validator_end_date(cls, v):
# common_utils.check_date_format(v, "%Y-%m-%d %H:%M:%S")
# return v
class BatchOEEData(OEEDataInsertRequest):
calculated_on: str
productive_time: float
availability: float
performance: float
class GetOEERequest(BaseModel):
prod_start_time: int
prod_end_time: int
hierarchy: str
class GetOEERequestOneBatch(BaseModel):
hierarchy: str
reference_id: str
project_id: str
class GetBatches(GetOEERequest):
import json
import traceback
from fastapi import APIRouter, Depends
from pydantic import ValidationError
from sqlalchemy.orm import Session
from scripts.constants import Endpoints
from scripts.core.handlers.batch_oee_calc_handler import CalculateBatchOEEHandler
from scripts.db.psql.databases import get_db
from scripts.errors import ILensError
from scripts.logging import logger
from scripts.schemas.batch_oee import OEEDataInsertRequest
from scripts.schemas.response_models import DefaultFailureResponse
batch_oee_handler = CalculateBatchOEEHandler()
calc_oee_router = APIRouter(prefix=Endpoints.calc_oee_base, tags=["OEE Calculator"])
@calc_oee_router.post(Endpoints.calculate_batch_oee)
def calculate_oee_for_batch(
product_info: OEEDataInsertRequest, db: Session = Depends(get_db)
):
try:
return batch_oee_handler.calculate_oee(request_data=product_info, db=db)
except ILensError as error_code:
return DefaultFailureResponse(error=error_code.args[0])
except ValidationError as e:
@calc_oee_router.post(Endpoints.update_batch_oee)
def update_oee_for_batch(
product_info: OEEDataInsertRequest, db: Session = Depends(get_db)
):
try:
return batch_oee_handler.calculate_oee(request_data=product_info, db=db)
except ValidationError as e:
return DefaultFailureResponse(error=json.loads(e.json()))
except Exception as e:
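A hedged example of exercising the calculate route, assuming the service listens on the configured 0.0.0.0:6869 and that Endpoints.calc_oee_base plus Endpoints.calculate_batch_oee resolve to /calc_oee/calculate (the exact path is not shown in this commit):

import requests

payload = {
    "hierarchy": "site_100$dept_100$line_100$equipment_100",
    "prod_start_time": "2022-04-22 19:49:00",
    "project_id": "project_099",
    "reference_id": "job_001",
    "cycle_time": 3,
    "total_units": 700,
    "reject_units": 35,
    "tz": "Asia/Kolkata",
}
# Cookie auth applies when SECURE_ACCESS is enabled; omitted here for brevity.
resp = requests.post("http://0.0.0.0:6869/calc_oee/calculate", json=payload)
print(resp.json())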
import traceback
from fastapi import APIRouter, Request, Cookie
from scripts.constants import Endpoints, ResponseCodes
from scripts.core.handlers.meta_handler import MetaHandler
from scripts.logging.logging import logger
from scripts.schemas.meta import GetHierarchyRequest
from scripts.schemas.response_models import DefaultFailureResponse, DefaultResponse
meta_handler = MetaHandler()
meta_service_router = APIRouter(prefix=Endpoints.api_hierarchy, tags=["Meta Services"])
@meta_service_router.post(Endpoints.api_get)
async def find_hierarchy(get_hierarchy_request: GetHierarchyRequest, request: Request):
try:
data = await meta_handler.find_hierarchy(
get_hierarchy_request=get_hierarchy_request, request=request
)
return DefaultResponse(
data=data,
status=ResponseCodes.SUCCESS,
message="Hierarchies listed successfully",
)
except Exception as e:
tb = traceback.format_exc()
logger.exception(e)
logger.exception(tb)
return DefaultFailureResponse(error=e.args)
import httpx
import requests
from aiohttp import ClientResponse
from scripts.constants import CommonStatusCode, Secrets
from scripts.logging import logger
from scripts.utils.security_utils.apply_encrytion_util import create_token
from datetime import datetime
import pendulum
from scripts.config import PathToServices
from scripts.constants import Secrets
from scripts.logging import logger
from scripts.utils.auth_util import ILensRequest, AuthenticationError
from scripts.utils.security_utils.apply_encrytion_util import create_token
class CommonUtils:
@staticmethod
def check_date_format(date_time_str, time_format):
try:
date_time = datetime.strptime(date_time_str, time_format)
except ValueError:
raise ValueError("Invalid Date Format")
if date_time_str != date_time.strftime(time_format):
raise ValueError("Invalid Date Format")
@staticmethod
def get_duration(tz, meta: dict, difference=False):
try:
from_time = meta.get("prod_start_time")
if not from_time:
return ""
if not meta.get("prod_end_time"):
to_time = pendulum.now(tz=tz)
else:
# parse only when an explicit end-time string was supplied
to_time = pendulum.parse(meta.get("prod_end_time"), tz=tz)
from_time = pendulum.parse(from_time, tz=tz)
diff = to_time - from_time
if difference:
return diff
return f"{int(diff.in_hours())} hours {int(diff.minutes)} minutes"
except Exception as e:
logger.exception(f"Exception in getting data: {e}")
raise
def get_downtime_details_by_hierarchy(self, hierarchy, project_id, user_id=None):
connection_obj = ILensRequest(url=PathToServices.downtime_proxy,
project_id=project_id)
try:
cookies = {'login-token': self.create_token(user_id=user_id), "user_id": user_id}
downtime_response = connection_obj.post(path="/downtime_log/get/overall_downtime",
json={"project_id": project_id,
"hierarchy": hierarchy})
response = downtime_response.json()
return response.get("data", 0)
except AuthenticationError:
logger.exception("Authentication Error when trying to connect with downtime module")
return 0
except Exception as e:
logger.exception(f'{e.args}')
return 0
@staticmethod
def create_token(host: str = '127.0.0.1', user_id=None, internal_token=Secrets.token, age=2):
"""
This method is to create a cookie
"""
try:
if user_id is None:
user_id = "user_099"
new_token = create_token(
user_id=user_id,
ip=host,
token=internal_token,
age=age
)
return new_token
except Exception as e:
logger.exception(str(e))
raise
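For example, minting a short-lived internal token for a service-to-service call, as get_downtime_details_by_hierarchy does above (the user id is assumed):

token = CommonUtils.create_token(user_id="user_099", age=2)
cookies = {"login-token": token, "user_id": "user_099"}
# these cookies can accompany an internal request to another iLens module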
import json
from functools import lru_cache
from scripts.db.redis_connections import project_details_db
@lru_cache()
def get_db_name(redis_client, project_id: str, database: str, delimiter="__"):
prefix_name = val.get("source_meta", {}).get("prefix") or project_id
return f"{prefix_name}{delimiter}{database}"
return database
@lru_cache()
def get_project_specific_key(project_id=None, delimiter="__"):
if not project_id:
return ""
specific_key = ""
prefix_detail = project_details_db.get(project_id)
if prefix_detail is not None:
prefix_detail = json.loads(prefix_detail)
if prefix_detail.get('source_meta', {}).get('add_prefix_to_database', False):
specific_key = prefix_detail.get('source_meta').get('prefix') + delimiter
return specific_key
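Illustrative outcomes of the prefixing rule, with made-up project metadata (both the project id and the prefix are assumptions):

# project_details_db value: {"source_meta": {"add_prefix_to_database": true, "prefix": "acme"}}
print(get_project_specific_key("project_099"))  # assumed to print "acme__"
# a project without prefix metadata resolves to an empty string
print(get_project_specific_key("project_xyz"))  # -> ""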
from ilens_kafka_publisher.v2 import KafkaPublisher

from scripts.config import KafkaConf
from scripts.db.redis_connections import partition_db
from scripts.logging import logger
class DataPush:
def __init__(self):
try:
self.obj = KafkaPublisher(kafka_host=KafkaConf.host,
kafka_port=int(KafkaConf.port),
kafka_topic=KafkaConf.topic,
redis_client=partition_db,
enable_sites_partition=KafkaConf.enable_sites_partition,
split_key=KafkaConf.split_key,
round_robin_enable=KafkaConf.round_robin_enable)
except Exception as e:
logger.error(f"Could not connect to Kafka: {e}")
def publish_message(self, msg):
try:
self.obj.perform_task(msg)
except Exception as e:
logger.debug(f"Failed to publish message - {e}")
logger.debug("Trying reconnect")
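Minimal usage sketch; the payload shape and site_id split key are assumptions inferred from KafkaConf:

pusher = DataPush()
pusher.publish_message({"site_id": "site_100", "oee": 69.27})  # hypothetical message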
from fastapi.security.api_key import APIKeyBase
from scripts.config import Service
from scripts.constants import Secrets
from scripts.db.redis_connections import login_db
from scripts.utils.security_utils.apply_encrytion_util import create_token
from scripts.utils.security_utils.jwt_util import JWT
class CookieAuthentication(APIKeyBase):
def __init__(
self,
cookie_name: str = "login-token",
):
super().__init__()
self.model: APIKey = APIKey(**{"in": APIKeyIn.cookie}, name=cookie_name)
user_id = decoded_token.get("user_id")
_token = decoded_token.get("token")
_token_age = int(decoded_token.get("age", Secrets.LOCK_OUT_TIME_MINS))
cookie_user_id = request.cookies.get(
"user_id", request.cookies.get(
"userId", request.headers.get("userId")
)
)
if not compare_digest(Secrets.token, _token):
raise HTTPException(status_code=401)
if login_token != decoded_token.get("uid"):
raise HTTPException(status_code=401)
if not compare_digest(user_id, cookie_user_id):
raise HTTPException(status_code=401)
try:
new_token = create_token(
ip=request.client.host,
token=Secrets.token,
login_token=login_token,
age=_token_age
)
except Exception as e:
raise HTTPException(status_code=401, detail=e.args)
response.set_cookie(
"login-token",
new_token,
samesite="strict",
secure=Service.secure_cookie,
httponly=True,
max_age=Secrets.LOCK_OUT_TIME_MINS * 60,
)
response.headers["login-token"] = new_token
return user_id