Commit 9b1da609 authored by harshavardhan.c's avatar harshavardhan.c

OEE Dashboard Fixes.

parent 03e2a5aa
from scripts.utils.kafka_util import DataPush
if __name__ == '__main__': if __name__ == '__main__':
from dotenv import load_dotenv from dotenv import load_dotenv
load_dotenv() load_dotenv()
import os
import time import time
from datetime import datetime from datetime import datetime, timedelta
import pytz from scripts.constants import CommonConstants, TagCategoryConstants
from scripts.core.engine.oee_calculator import OEEEngine
from production_monitoring import ProductionMonitor from scripts.core.handlers.batch_oee_calc_handler import CalculateBatchOEEHandler
from scripts.core.handlers.common_handler import CommonHandler
prod_mon = ProductionMonitor() from scripts.logging import logger
data_push = DataPush() from scripts.schemas.batch_oee import MachineOEERequest, BatchOEEData, OEEDataInsertRequest, OEEDataSaveRequest
from scripts.utils.common_utils import CommonUtils
tag_mapping = { from scripts.utils.kafka_util import DataPush
"oee": "site_100$dept_100$line_100$equipment_101$tag_215",
"availability": "site_100$dept_100$line_100$equipment_101$tag_216",
"performance": "site_100$dept_100$line_100$equipment_101$tag_217",
"quality": "site_100$dept_100$line_100$equipment_101$tag_218",
"running_lot": "site_100$dept_100$line_100$equipment_101$tag_219",
"running_item": "site_100$dept_100$line_100$equipment_101$tag_220",
"target": "site_100$dept_100$line_100$equipment_101$tag_222",
"downtime": "site_100$dept_100$line_100$equipment_101$tag_223",
"setup_time": "site_100$dept_100$line_100$equipment_101$tag_225",
"running_time": "site_100$dept_100$line_100$equipment_101$tag_226"
}
def oee_update(): class MachineOEECalculator:
data = prod_mon.oee_mongo.find_record_by_not_status("completed") def __init__(self, project_id=None):
if not data: self.common_util = CommonUtils()
print("No jobs are running, waiting for job to start...") self.batch_oee_handler = CalculateBatchOEEHandler()
self.common_handler = CommonHandler(project_id=project_id)
self.oee_engine = OEEEngine()
self.data_push = DataPush()
def calculate_machine_oee(self, request_data: MachineOEERequest):
try:
hierarchy_dict = self.common_handler.get_valid_oee_monitoring_hierarchy(project_id=request_data.project_id)
now = datetime.today() - timedelta(days=2)
oee_start_time = datetime.strptime(request_data.monitor_time, '%H:%M').replace(year=now.year,
month=now.month,
day=now.day).strftime(
CommonConstants.USER_META_TIME_FORMAT)
oee_end_time = datetime.now().strftime(CommonConstants.USER_META_TIME_FORMAT)
for k, v in hierarchy_dict.items():
site_id = k.split("$")[0]
downtime = self.common_util.get_downtime_details_by_hierarchy(
hierarchy=k, project_id=request_data.project_id)
input_data = OEEDataInsertRequest(prod_start_time=oee_start_time,
prod_end_time=oee_end_time, downtime=downtime,
hierarchy=k, cycle_time=os.environ.get("CYCLE_TIME", default=5),
tz=request_data.tz,
project_id=request_data.project_id)
input_data.total_units, input_data.reject_units = self.batch_oee_handler.get_data_for_tags(
input_data=input_data)
oee_response: BatchOEEData = self.oee_engine.start_batch_oee_calc(
request_data=OEEDataSaveRequest(**input_data.dict()))
data_dict = {
v[TagCategoryConstants.OEE_OUTPUT_CATEGORY]: oee_response.oee,
v[TagCategoryConstants.OEE_OUTPUT_PERFORMANCE_CATEGORY]: oee_response.performance,
v[TagCategoryConstants.OEE_OUTPUT_QUALITY_CATEGORY]: oee_response.quality,
v[TagCategoryConstants.OEE_OUTPUT_QUALITY_LOSS_CATEGORY]: oee_response.quality_loss,
v[TagCategoryConstants.OEE_OUTPUT_PERFORMANCE_LOSS_CATEGORY]: oee_response.performance_loss,
v[TagCategoryConstants.OEE_OUTPUT_AVAILABILITY_CATEGORY]: oee_response.availability,
v[TagCategoryConstants.OEE_OUTPUT_AVAILABILITY_LOSS_CATEGORY]: oee_response.availability_loss
}
message_dict = {
"data": data_dict,
"site_id": site_id,
"gw_id": "",
"pd_id": "",
"p_id": request_data.project_id,
"timestamp": int(time.time() * 1000),
"msg_id": 1,
"retain_flag": False
}
self.data_push.publish_message(message_dict)
except Exception as e:
logger.exception(f"Exception Occurred while calculating oee for the hierarchy {e.args}")
return return
print(f"Calculating OEE for {data.get('job')}")
data_dict = {}
if data.get("run_start_time"):
run_start_time = datetime.fromtimestamp(
data.get("run_start_time") // 1000,
tz=pytz.timezone("Asia/Bangkok")
)
downtime = prod_mon.automation_engine.get_downtime(
run_start_time=run_start_time,
production_end_time=datetime.now(tz=pytz.timezone("Asia/Bangkok"))
)
else:
downtime = 0
oee, availability, performance, quality = prod_mon.calculate_oee_params(data, downtime)
data_dict.update(
{
tag_mapping.get("running_lot"): data.get("job", ""), # job no
tag_mapping.get("running_item"): data.get("item", ""), # item no
tag_mapping.get("target"): data.get("qty_released", 0), # quality released
tag_mapping.get("oee"): oee,
tag_mapping.get("availability"): availability,
tag_mapping.get("performance"): performance,
tag_mapping.get("quality"): quality,
tag_mapping.get("downtime"): downtime,
tag_mapping.get("setup_time"): data.get("setup_time", 0),
tag_mapping.get("running_time"): data.get("running_time", 0),
}
)
message_dict = {
"data": data_dict,
"site_id": prod_mon.settings["automation"]["site_id"],
"gw_id": "",
"pd_id": "",
"p_id": prod_mon.settings["automation"]["project_id"],
"timestamp": int(time.time() * 1000),
"msg_id": 1,
"retain_flag": False
}
data_push.publish_message(message_dict)
good_count, units_produced = prod_mon.get_current_produced_count()
running_time = (datetime.now() - datetime.fromtimestamp(data.get("start_time") / 1000)).total_seconds() / 60
mongo_data = {
"good_count": good_count,
"units_produced": units_produced,
"running_time": running_time
}
data.update(mongo_data)
prod_mon.oee_mongo.update_oee(data, data.get("job", ""), data.get("uf_process", ""), False)
if __name__ == '__main__': if __name__ == '__main__':
while True: while True:
oee_update() projects_list = os.environ.get("OEE_PROJECTS", default="project_170")
time.sleep(3) monitor_start_time = os.environ.get("OEE_START_TIME", default="00:00")
for project in projects_list.split(","):
MachineOEECalculator().calculate_machine_oee(
request_data=MachineOEERequest(project_id=project, monitor_time="00:00",
tz="Asia/Kolkata"))
time.sleep(10)
if __name__ == '__main__':
from dotenv import load_dotenv
load_dotenv()
import os
import time
from datetime import datetime, timedelta
from scripts.constants import CommonConstants, TagCategoryConstants
from scripts.core.engine.oee_calculator import OEEEngine
from scripts.core.handlers.batch_oee_calc_handler import CalculateBatchOEEHandler
from scripts.core.handlers.common_handler import CommonHandler
from scripts.logging import logger
from scripts.schemas.batch_oee import MachineOEERequest, BatchOEEData, OEEDataInsertRequest, OEEDataSaveRequest
from scripts.utils.common_utils import CommonUtils
from scripts.utils.kafka_util import DataPush
class MachineOEECalculator:
    """Calculates machine-level OEE for every valid monitoring hierarchy of a
    project and publishes the results to Kafka.

    NOTE(review): indentation reconstructed — original formatting was lost in
    the diff extraction; verify against the repository copy.
    """

    def __init__(self, project_id=None):
        # Collaborators: downtime lookup, per-tag data fetch, hierarchy
        # resolution, OEE math, and Kafka publishing respectively.
        self.common_util = CommonUtils()
        self.batch_oee_handler = CalculateBatchOEEHandler()
        self.common_handler = CommonHandler(project_id=project_id)
        self.oee_engine = OEEEngine()
        self.data_push = DataPush()

    def calculate_machine_oee(self, request_data: MachineOEERequest):
        """Compute OEE for each hierarchy of the project and publish it.

        :param request_data: carries project_id, monitor_time ("HH:MM" window
            start) and tz. Any exception is logged and swallowed; returns None.
        """
        try:
            hierarchy_dict = self.common_handler.get_valid_oee_monitoring_hierarchy(project_id=request_data.project_id)
            # Window start: monitor_time (HH:MM) anchored on yesterday's date
            # (today minus one day); window end: now. Both rendered with the
            # shared user-meta time format.
            now = datetime.today() - timedelta(days=1)
            oee_start_time = datetime.strptime(request_data.monitor_time, '%H:%M').replace(year=now.year,
                                                                                           month=now.month,
                                                                                           day=now.day).strftime(
                CommonConstants.USER_META_TIME_FORMAT)
            oee_end_time = datetime.now().strftime(CommonConstants.USER_META_TIME_FORMAT)
            for k, v in hierarchy_dict.items():
                # Hierarchy keys are "$"-joined; the first segment is the
                # site id used in the outbound message.
                site_id = k.split("$")[0]
                downtime = self.common_util.get_downtime_details_by_hierarchy(
                    hierarchy=k, project_id=request_data.project_id)
                # CYCLE_TIME is a string when the env var is set, int 5
                # otherwise — presumably coerced by the pydantic schema;
                # TODO confirm.
                input_data = OEEDataInsertRequest(prod_start_time=oee_start_time,
                                                  prod_end_time=oee_end_time, downtime=downtime,
                                                  hierarchy=k, cycle_time=os.environ.get("CYCLE_TIME", default=5),
                                                  tz=request_data.tz,
                                                  project_id=request_data.project_id)
                input_data.total_units, input_data.reject_units = self.batch_oee_handler.get_data_for_tags(
                    input_data=input_data)
                oee_response: BatchOEEData = self.oee_engine.start_batch_oee_calc(
                    request_data=OEEDataSaveRequest(**input_data.dict()))
                # v maps output-tag categories to tag ids; pair each output
                # tag with the corresponding calculated metric.
                data_dict = {
                    v[TagCategoryConstants.OEE_OUTPUT_CATEGORY]: oee_response.oee,
                    v[TagCategoryConstants.OEE_OUTPUT_PERFORMANCE_CATEGORY]: oee_response.performance,
                    v[TagCategoryConstants.OEE_OUTPUT_QUALITY_CATEGORY]: oee_response.quality,
                    v[TagCategoryConstants.OEE_OUTPUT_QUALITY_LOSS_CATEGORY]: oee_response.quality_loss,
                    v[TagCategoryConstants.OEE_OUTPUT_PERFORMANCE_LOSS_CATEGORY]: oee_response.performance_loss,
                    v[TagCategoryConstants.OEE_OUTPUT_AVAILABILITY_CATEGORY]: oee_response.availability,
                    v[TagCategoryConstants.OEE_OUTPUT_AVAILABILITY_LOSS_CATEGORY]: oee_response.availability_loss
                }
                message_dict = {
                    "data": data_dict,
                    "site_id": site_id,
                    "gw_id": "",
                    "pd_id": "",
                    "p_id": request_data.project_id,
                    "timestamp": int(time.time() * 1000),  # epoch milliseconds
                    "msg_id": 1,
                    "retain_flag": False
                }
                self.data_push.publish_message(message_dict)
        except Exception as e:
            # A failure on any hierarchy aborts the remaining ones for this
            # run; the error is logged and the method returns None.
            logger.exception(f"Exception Occurred while calculating oee for the hierarchy {e.args}")
            return
if __name__ == '__main__':
    # Forever loop: recompute machine OEE for every configured project,
    # then pause before the next sweep.
    while True:
        projects_list = os.environ.get("OEE_PROJECTS", default="project_170")
        monitor_start_time = os.environ.get("OEE_START_TIME", default="00:00")
        for project in projects_list.split(","):
            # Fix: honour OEE_START_TIME — it was read into
            # monitor_start_time but the request was built with a
            # hard-coded "00:00" monitor_time.
            MachineOEECalculator().calculate_machine_oee(
                request_data=MachineOEERequest(project_id=project, monitor_time=monitor_start_time,
                                               tz="Asia/Kolkata"))
        time.sleep(10)
from copy import deepcopy
import pandas as pd import pandas as pd
from scripts.schemas.batch_oee import ChartResponse, ChartDBResponse from scripts.config import DBConf
from scripts.core.engine.oee_calculator import OEETagFinder
from scripts.core.handlers.batch_oee_calc_handler import CalculateBatchOEEHandler
from scripts.core.handlers.common_handler import CommonHandler
from scripts.db.mongo.schema.tag_hierarchy import GetTagsLists, OutputTagsList
from scripts.errors import DataNotFound
from scripts.logging import logger
from scripts.schemas.batch_oee import ChartResponse, ChartDBResponse, ChartRequest, OEEDataInsertRequest
from scripts.utils.common_utils import CommonUtils from scripts.utils.common_utils import CommonUtils
from scripts.utils.kairos_db_util import BaseQuery
from scripts.utils.kairos_db_util.df_formation_util import create_kairos_df
from scripts.utils.kairos_db_util.query_kairos import KairosQuery
class OEEAggregator: class OEEAggregator:
def __init__(self): def __init__(self, project_id=None):
self.common_util = CommonUtils() self.common_util = CommonUtils()
self.base_query = BaseQuery()
self.oee_tag_finder = OEETagFinder()
self.common_handler = CommonHandler(project_id=project_id)
self.oee_handler = CalculateBatchOEEHandler(project_id=project_id)
def processor(self, data): def processor(self, data):
db_response = ChartDBResponse(**data) db_response = ChartDBResponse(**data)
...@@ -24,43 +40,52 @@ class OEEAggregator: ...@@ -24,43 +40,52 @@ class OEEAggregator:
chart_response = ChartResponse(**db_response.dict()) chart_response = ChartResponse(**db_response.dict())
return chart_response.dict() return chart_response.dict()
@staticmethod def aggregator(self, request_data: ChartRequest):
def aggregator(data, activity_length=1): try:
df = pd.DataFrame(data) start_time = int(self.common_util.pendulum_conversion(request_data.queryDate[0], tz=request_data.tz,
df["total_time"] = (df["batch_end_time"] - df["batch_start_time"]) / 60000 timestamp=True)) * 1000
df["actual_cycle"] = df["total_units"] / df["total_time"] end_time = int(self.common_util.pendulum_conversion(request_data.queryDate[-1], tz=request_data.tz,
df["ideal_cycle"] = df["cycle_time"] timestamp=True)) * 1000
df["good_units"] = df["total_units"] - df["reject_units"] hierarchy_tags = self.common_handler.tag_hierarchy_handler.get_tags_list_by_hierarchy(
df["reject_time"] = df["reject_units"] * (1 / df["ideal_cycle"]) GetTagsLists(**request_data.dict()))
agg_oee = df.sum().round(2) total_units_tag_id = self.oee_tag_finder.get_total_units_tag_id(input_data=hierarchy_tags)
availability = (agg_oee["total_time"] - agg_oee["downtime"]) / agg_oee["total_time"] reject_units_tag_id = self.oee_tag_finder.get_reject_units_tag_id(input_data=hierarchy_tags)
performance = agg_oee["productive_time"] / ( output_tags_dict = self.common_handler.tag_hierarchy_handler.get_output_tags_for_oee(
agg_oee["total_time"] - agg_oee["downtime"] input_data=OutputTagsList(**request_data.dict()))
) if not output_tags_dict or not output_tags_dict.get(request_data.hierarchy):
quality = (agg_oee["total_units"] - agg_oee["reject_units"]) / agg_oee[ return {}
"total_units" updated_dict = self.common_handler.validate_hierarchy_tags(output_tags_dict[request_data.hierarchy])
] new_columns_dict = self.common_handler.get_oee_keys_mapping_dict(output_tags_dict[request_data.hierarchy])
oee_overall = round(availability * performance * quality, 2) * 100 tags_list = list(updated_dict.values())
availability_loss = agg_oee["downtime"] / agg_oee["total_time"] * 100 group_by_tags_list = deepcopy(tags_list)
quality_loss = agg_oee["reject_time"] / agg_oee["total_time"] * 100 group_by_tags_list.append(DBConf.KAIROS_DEFAULT_FULL_TAG)
chart_response = ChartResponse( tags_list.extend([total_units_tag_id, reject_units_tag_id])
total_units=round(agg_oee["total_units"] - (len(df) * activity_length)), if not tags_list:
reject_units=agg_oee["reject_units"], return {}
oee=oee_overall, kairos_util = KairosQuery(url=DBConf.KAIROS_URL)
availability=round(availability * 100, 2), data = kairos_util.query(
downtime=agg_oee["downtime"], self.base_query.form_generic_query(tags_list=tags_list,
performance=round(performance * 100, 2), project_id=request_data.project_id,
quality=round(quality * 100, 2), start_epoch=start_time, end_epoch=end_time))
actual_cycle=agg_oee["actual_cycle"], master_df = pd.DataFrame()
ideal_cycle=agg_oee["ideal_cycle"], data = [data] if not isinstance(data, list) else data
good_units=round(agg_oee["good_units"] - (len(df) * activity_length)), for each_data in data:
availability_loss=availability_loss, master_df = create_kairos_df(
quality_loss=quality_loss, master_df=master_df,
performance_loss=round(100 - availability_loss - quality_loss - oee_overall, 2), response_data=each_data,
total_time=agg_oee["total_time"], tags_list=tags_list,
productive_time=agg_oee["productive_time"], group_by_tags=group_by_tags_list,
) tz=request_data.tz
filtered = chart_response.dict() )
remove_keys = ["productive_time", "downtime", "reject_units"] if master_df.empty:
[filtered.pop(each, None) for each in remove_keys] raise DataNotFound
return filtered master_df_columns = list(master_df.columns)
input_data = {"prod_start_time": start_time, "prod_end_time": end_time}
input_data.update(request_data.dict(exclude_none=True))
input_schema = OEEDataInsertRequest(**input_data)
input_schema.total_units, input_schema.reject_units = self.oee_handler.get_data_for_tags(
input_data=input_schema)
except Exception as e:
logger.execption(f'Exception occurred while plotting the dashboard {e.args}')
...@@ -95,23 +95,21 @@ class APIHandler: ...@@ -95,23 +95,21 @@ class APIHandler:
if not request_data.hierarchy: if not request_data.hierarchy:
return dict() return dict()
chart_maker = ChartMaker() chart_maker = ChartMaker()
data = table_obj.get_chart_data( if request_data.reference_id:
hierarchy=request_data.hierarchy, data = table_obj.get_chart_data(
prod_start_time=request_data.queryDate[0], hierarchy=request_data.hierarchy,
prod_end_time=request_data.queryDate[1], prod_start_time=request_data.queryDate[0],
reference_id=request_data.reference_id, prod_end_time=request_data.queryDate[1],
aggregation=request_data.aggregation, reference_id=request_data.reference_id,
tz=request_data.tz tz=request_data.tz
) )
if not request_data.aggregation or len(data) == 1: if isinstance(data, list) and data:
if isinstance(data, list):
data = data[0] data = data[0]
raw_data = self.oee_agg.processor(data) raw_data = self.oee_agg.processor(data)
return chart_maker.main_creator(raw_data, overall=False) return chart_maker.main_creator(raw_data, overall=False)
elif len(data) == 0:
return dict() return dict()
else: else:
agg_data = self.oee_agg.aggregator(data) agg_data = self.oee_agg.aggregator(request_data=request_data)
return chart_maker.main_creator(agg_data) return chart_maker.main_creator(agg_data)
except Exception as e: except Exception as e:
......
...@@ -127,8 +127,6 @@ class CalculateBatchOEEHandler: ...@@ -127,8 +127,6 @@ class CalculateBatchOEEHandler:
datetime.strptime(input_data.prod_end_time, CommonConstants.USER_META_TIME_FORMAT).astimezone( datetime.strptime(input_data.prod_end_time, CommonConstants.USER_META_TIME_FORMAT).astimezone(
tz=pytz.timezone(input_data.tz)).timestamp()) * 1000 tz=pytz.timezone(input_data.tz)).timestamp()) * 1000
hierarchy_tags = self.tag_hierarchy_handler.get_tags_list_by_hierarchy(GetTagsLists(**input_data.dict())) hierarchy_tags = self.tag_hierarchy_handler.get_tags_list_by_hierarchy(GetTagsLists(**input_data.dict()))
# hierarchy_tags = {TagCategoryConstants.TOTAL_UNITS_CATEGORY: "site_114$line_1306$equipment_5812$tag_100",
# TagCategoryConstants.REJECT_UNITS_CATEGORY: "site_114$line_1306$equipment_5812$tag_60538"}
total_units_tag_id = self.oee_tag_finder.get_total_units_tag_id(input_data=hierarchy_tags) total_units_tag_id = self.oee_tag_finder.get_total_units_tag_id(input_data=hierarchy_tags)
reject_units_tag_id = self.oee_tag_finder.get_reject_units_tag_id(input_data=hierarchy_tags) reject_units_tag_id = self.oee_tag_finder.get_reject_units_tag_id(input_data=hierarchy_tags)
kairos_util = KairosQuery(url=DBConf.KAIROS_URL) kairos_util = KairosQuery(url=DBConf.KAIROS_URL)
......
...@@ -64,3 +64,25 @@ class CommonHandler: ...@@ -64,3 +64,25 @@ class CommonHandler:
except Exception as e: except Exception as e:
logger.exception(f'Exception Occurred while validating the tags for a hierarchy {e.args}') logger.exception(f'Exception Occurred while validating the tags for a hierarchy {e.args}')
return {} return {}
@staticmethod
def get_oee_keys_mapping_dict(input_dict: dict):
    """Invert a {tag-category: tag-id} dict into {tag-id: metric-key-name}.

    Categories without a known metric key are silently dropped, exactly as
    the original if/elif chain did.
    NOTE(review): OEE_OUTPUT_PERFORMANCE_LOSS_CATEGORY has no mapping here
    even though performance_loss is published elsewhere in this change —
    confirm the omission is intentional.

    :param input_dict: mapping of tag category constant -> tag id.
    :return: mapping of tag id -> dataframe/metric column name.
    """
    # Table lookup replaces the long if/elif chain (same behaviour,
    # including later duplicates overwriting earlier ones).
    category_to_key = {
        TagCategoryConstants.TOTAL_UNITS_CATEGORY: "total_units",
        TagCategoryConstants.REJECT_UNITS_CATEGORY: "reject_units",
        TagCategoryConstants.OEE_OUTPUT_CATEGORY: "oee",
        TagCategoryConstants.OEE_OUTPUT_AVAILABILITY_CATEGORY: "availability",
        TagCategoryConstants.OEE_OUTPUT_PERFORMANCE_CATEGORY: "performance",
        TagCategoryConstants.OEE_OUTPUT_QUALITY_CATEGORY: "quality",
        TagCategoryConstants.OEE_OUTPUT_QUALITY_LOSS_CATEGORY: "quality_loss",
        TagCategoryConstants.OEE_OUTPUT_AVAILABILITY_LOSS_CATEGORY: "availability_loss",
    }
    return {tag_id: category_to_key[category]
            for category, tag_id in input_dict.items()
            if category in category_to_key}
...@@ -10,7 +10,7 @@ class LayoutHandler: ...@@ -10,7 +10,7 @@ class LayoutHandler:
async def save_layout(self, layout_request: SaveLayoutRequest): async def save_layout(self, layout_request: SaveLayoutRequest):
try: try:
data = self.oee_layout_conn.update_layout( data = self.oee_layout_conn.update_layout(
data=layout_request.dict(), project_id=layout_request.project_id data=layout_request.dict(), project_id=layout_request.project_id, upsert=True
) )
return data return data
except Exception: except Exception:
......
...@@ -28,6 +28,8 @@ class TagHierarchyHandler: ...@@ -28,6 +28,8 @@ class TagHierarchyHandler:
hierarchy_str = re.escape("|".join([f'{_each}$tag' for _each in input_data.hierarchy_list])) hierarchy_str = re.escape("|".join([f'{_each}$tag' for _each in input_data.hierarchy_list]))
elif input_data.hierarchy_level: elif input_data.hierarchy_level:
hierarchy_str = input_data.hierarchy_level hierarchy_str = input_data.hierarchy_level
elif input_data.hierarchy:
hierarchy_str = re.escape(f'{input_data.hierarchy}$tag')
else: else:
return {} return {}
aggregate_query = TagHierarchyAggregate.tag_aggregate_by_hierarchy_list(project_id=input_data.project_id, aggregate_query = TagHierarchyAggregate.tag_aggregate_by_hierarchy_list(project_id=input_data.project_id,
......
...@@ -12,6 +12,7 @@ class OutputTagsList(BaseModel): ...@@ -12,6 +12,7 @@ class OutputTagsList(BaseModel):
project_id: str project_id: str
hierarchy_list: Optional[List] hierarchy_list: Optional[List]
hierarchy_level: Optional[str] hierarchy_level: Optional[str]
hierarchy: Optional[str]
class Config: class Config:
schema_extra = { schema_extra = {
......
from datetime import datetime
from typing import Optional, Union, List from typing import Optional, Union, List
from pydantic import BaseModel, validator from pydantic import BaseModel, validator
...@@ -42,10 +43,10 @@ class WaterFallChart(BaseModel): ...@@ -42,10 +43,10 @@ class WaterFallChart(BaseModel):
class ChartRequest(BaseModel): class ChartRequest(BaseModel):
project_id: str project_id: str
queryDate: List[str] queryDate: List[str] = [datetime.now().replace(hour=00, minute=00, second=00),
datetime.now().replace(hour=23, minute=59, second=59)]
hierarchy: Optional[str] hierarchy: Optional[str]
reference_id: Optional[str] reference_id: Optional[str]
aggregation: Optional[bool] = False
tz: Optional[str] = "Asia/kolkata" tz: Optional[str] = "Asia/kolkata"
class Config: class Config:
...@@ -70,7 +71,6 @@ class ChartDBResponse(BaseModel): ...@@ -70,7 +71,6 @@ class ChartDBResponse(BaseModel):
reject_units: int reject_units: int
oee: int oee: int
availability: float availability: float
downtime: int
performance: int performance: int
performance_loss: float performance_loss: float
quality: int quality: int
......
...@@ -5,7 +5,7 @@ import pytz ...@@ -5,7 +5,7 @@ import pytz
import shortuuid import shortuuid
from scripts.config import PathToServices from scripts.config import PathToServices
from scripts.constants import Secrets, EndpointConstants, UOM from scripts.constants import Secrets, EndpointConstants, UOM, CommonConstants
from scripts.db.redis_connections import project_details_db from scripts.db.redis_connections import project_details_db
from scripts.logging import logger from scripts.logging import logger
from scripts.utils.auth_util import ILensRequest, AuthenticationError from scripts.utils.auth_util import ILensRequest, AuthenticationError
...@@ -103,6 +103,12 @@ class CommonUtils: ...@@ -103,6 +103,12 @@ class CommonUtils:
localized_dt = localized_tz.localize(datetime_with_tz) localized_dt = localized_tz.localize(datetime_with_tz)
return localized_dt return localized_dt
@staticmethod
def pendulum_conversion(date_str, tz, output_format=CommonConstants.USER_META_TIME_FORMAT, timestamp=False):
    """Parse *date_str* in timezone *tz* via pendulum.

    Returns the epoch timestamp (seconds) when *timestamp* is True,
    otherwise the parsed datetime rendered with *output_format*.
    NOTE(review): no `import pendulum` is visible in this change's import
    hunk — confirm the module is imported at the top of the file.
    """
    if timestamp:
        return pendulum.parse(date_str, tz=tz).timestamp()
    return pendulum.parse(date_str, tz=tz).strftime(output_format)
@staticmethod @staticmethod
def get_next_id(_=None) -> str: def get_next_id(_=None) -> str:
return shortuuid.uuid() return shortuuid.uuid()
......
from ilens_kafka_publisher.v2 import KafkaPublisher from ilens_kafka_publisher.v2 import KafkaPublisher
from scripts.config import KafkaConf from scripts.config import KafkaConf
from scripts.db.redis_connections import partition_db
from scripts.logging import logger from scripts.logging import logger
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment