Commit 4b016bf4 authored by harshavardhan.c

Dev: Added requirements.txt, Dockerfile and enabled data type insertion based on columns or rows.

parent 6449136a
MODULE_NAME=form-management
MONGO_URI=mongodb://192.168.0.220:2717/
METADATA_DB=ilens_configuration
ILENS_ASSISTANT=ilens_assistant
ILENS_ASSET_MODEL_DB=ilens_asset_model
ASSISTANT_URI=postgresql://postgres:postgres@192.168.0.220:5432/ilens_assistant
FORM_DE=http://192.168.0.220/formde/
METADATA_SERVICES=http://192.168.0.220/ilens_api/
KAIROS_URI=http://192.168.0.220:8080/
BASE_PATH=opt/services/ilens2.0/volumes
MOUNT_DIR=/form-management
REDIS_HOST=192.168.0.220
REDIS_PORT=6379
KAFKA_HOST=192.168.0.220
KAFKA_PORT=9092
KAFKA_TOPIC=ilens_dev
KAFKA_AUDIT_TOPIC=audit_logs
MAINTENANCE_URI=postgresql://postgres:postgres@192.168.0.220:5432/maintenance_logbook
FORM_MT=http://192.168.0.220/form-mt/
PERIODIC_ENTRY_AUDITING=true
FORM_NON_PERIODIC_AUDITING=true
FORM_PERIODIC_AUDITING=true
ENABLE_KAFKA_PARTITION=true
ROUND_ROBIN_PARTITION=true
INTERVAL=60
EMAIL_SERVICE_PROXY=https://cloud.ilens.io/sms-util
MQTT_URL=192.168.0.220
MQTT_PORT=1883
FILES_MAPPING={"Quality Report_AC2O.xlsx":{"Pure Tank":"A4:E7"}}
COLUMN_BASED_SHEETS={"Quality Report_AC2O.xlsx":["Sheet1"]}
TEMPLATE_PATH=data/templates
DATA_PATH=C:/Users/harshavardhan.c/Downloads
\ No newline at end of file
# Base image bundles Python 3.8 with Uvicorn/Gunicorn preconfigured for FastAPI apps.
FROM tiangolo/uvicorn-gunicorn-fastapi:python3.8-slim

# Copy only the dependency manifest first so the (slow) pip install layer is
# cached by Docker and re-runs only when requirements.txt itself changes —
# with `COPY . /app` first, any source edit busted the cache and forced a
# full reinstall. Absolute path avoids relying on the base image's WORKDIR.
COPY requirements.txt /app/requirements.txt
RUN pip install -r /app/requirements.txt

# Copy the rest of the application source last.
COPY . /app
pymongo~=3.11.3
requests==2.24.0
cryptography>=3.3.1
PyJWT==2.0.1
#python_bcrypt==0.3.2
redis~=3.5.3
Werkzeug>=2.0
pandas~=1.2.3
paho-mqtt==1.5.0
python-dotenv==0.17.1
pydantic~=1.7.3
pydantic[dotenv]
pyjwt[crypto]
pycryptodome~=3.10.1
python-multipart
pendulum~=2.1.2
iteration-utilities==0.11.0
xlsxwriter==1.3.7
uvicorn~=0.13.4
pytz~=2021.1
setuptools~=54.1.1
configparser~=5.0.1
aiofiles~=0.6.0
fastapi~=0.63.0
starlette~=0.13.6
kafka-python
ilens_kafka_publisher
geopy==2.2.0
shortuuid==1.0.8
python-dateutil==2.8.1
openpyxl==3.0.7
Jinja2==2.11.3
PyYAML==6.0
\ No newline at end of file
import json
import os.path import os.path
import sys import sys
from datetime import datetime from datetime import datetime
...@@ -14,9 +15,14 @@ enable_timestamp = os.environ.get("CURRENT_TIMESTAMP", default="true") ...@@ -14,9 +15,14 @@ enable_timestamp = os.environ.get("CURRENT_TIMESTAMP", default="true")
code_timezone = os.environ.get("TIMEZONE", default="Asia/Kolkata") code_timezone = os.environ.get("TIMEZONE", default="Asia/Kolkata")
project_id = os.environ.get("PROJECT_ID", default="project_099") project_id = os.environ.get("PROJECT_ID", default="project_099")
files_mapping = os.environ.get("FILES_MAPPING", default="") files_mapping = os.environ.get("FILES_MAPPING", default="")
columns_based_sheets = os.environ.get("COLUMN_BASED_SHEETS", default={})
if not files_mapping: if not files_mapping:
print("Files Mapping Found empty, existing!!") print("Files Mapping Found empty, existing!!")
sys.exit(0) sys.exit(0)
if not columns_based_sheets:
print("Files Mapping Found empty, existing!!")
sys.exit(0)
columns_based_sheets = json.loads(columns_based_sheets)
class ExcelUploadHandler: class ExcelUploadHandler:
...@@ -27,7 +33,7 @@ class ExcelUploadHandler: ...@@ -27,7 +33,7 @@ class ExcelUploadHandler:
self.project_id = project_id self.project_id = project_id
self.kafka_conn = DataPush() self.kafka_conn = DataPush()
def parse_excel_data(self, template_file_path, data_file_path, template_fill_range, def parse_excel_data(self, template_file_path, data_file_path, template_fill_range, file_name,
sheet_name="Sheet1"): sheet_name="Sheet1"):
try: try:
if not os.path.join(template_file_path): if not os.path.join(template_file_path):
...@@ -41,8 +47,7 @@ class ExcelUploadHandler: ...@@ -41,8 +47,7 @@ class ExcelUploadHandler:
data_parameters = self.get_file_object_data(file=data_file_path, fill_range=template_fill_range, data_parameters = self.get_file_object_data(file=data_file_path, fill_range=template_fill_range,
sheet_name=sheet_name) sheet_name=sheet_name)
self.execute_data_insertion(template_parameters=template_parameters, data_parameters=data_parameters, self.execute_data_insertion(template_parameters=template_parameters, data_parameters=data_parameters,
fill_range=template_fill_range) fill_range=template_fill_range, file_name=file_name, sheet_name=sheet_name)
print(data_parameters)
except Exception as e: except Exception as e:
logger.exception(f"Exception occurred while parsing the excel data {e.args}") logger.exception(f"Exception occurred while parsing the excel data {e.args}")
...@@ -69,7 +74,8 @@ class ExcelUploadHandler: ...@@ -69,7 +74,8 @@ class ExcelUploadHandler:
logger.error(e.args) logger.error(e.args)
raise raise
def execute_data_insertion(self, template_parameters: dict, data_parameters: dict, fill_range): def execute_data_insertion(self, template_parameters: dict, data_parameters: dict, fill_range, sheet_name: str,
file_name: str):
try: try:
__start_coords__, __end_coords__ = self.excel.get_row_column(fill_range) __start_coords__, __end_coords__ = self.excel.get_row_column(fill_range)
start_row, start_column = __start_coords__ start_row, start_column = __start_coords__
...@@ -82,6 +88,7 @@ class ExcelUploadHandler: ...@@ -82,6 +88,7 @@ class ExcelUploadHandler:
'retain_flag': True, 'msg_id': 1, "p_id": ""} 'retain_flag': True, 'msg_id': 1, "p_id": ""}
key_mapping_json = dict(zip(template_parameters.values(), template_parameters.keys())) key_mapping_json = dict(zip(template_parameters.values(), template_parameters.keys()))
date_keys = [item for item in list(key_mapping_json.keys()) if item and item.startswith("date")] date_keys = [item for item in list(key_mapping_json.keys()) if item and item.startswith("date")]
if sheet_name not in columns_based_sheets.get(file_name):
for _row in rows_range: for _row in rows_range:
res = [item for item in list(template_parameters.keys()) if item.endswith(_row)] res = [item for item in list(template_parameters.keys()) if item.endswith(_row)]
_dict = {template_parameters[_element]: data_parameters[_element] for _element in res if _dict = {template_parameters[_element]: data_parameters[_element] for _element in res if
...@@ -92,6 +99,7 @@ class ExcelUploadHandler: ...@@ -92,6 +99,7 @@ class ExcelUploadHandler:
insert_data = self.prepare_data_processor_json(input_json=_dict, data_parameters=data_parameters, insert_data = self.prepare_data_processor_json(input_json=_dict, data_parameters=data_parameters,
key_mapping_json=key_mapping_json, key_mapping_json=key_mapping_json,
date_keys=date_keys, insert_data=insert_data) date_keys=date_keys, insert_data=insert_data)
else:
for _column in columns_range: for _column in columns_range:
res = [item for item in list(template_parameters.keys()) if item.startswith(_column)] res = [item for item in list(template_parameters.keys()) if item.startswith(_column)]
_dict = {template_parameters[_element]: data_parameters[_element] for _element in res if _dict = {template_parameters[_element]: data_parameters[_element] for _element in res if
...@@ -106,7 +114,6 @@ class ExcelUploadHandler: ...@@ -106,7 +114,6 @@ class ExcelUploadHandler:
insert_json.update({ insert_json.update({
"data": v, "p_id": project_id, "msg_id": msg_counter, "timestamp": timestamp "data": v, "p_id": project_id, "msg_id": msg_counter, "timestamp": timestamp
}) })
msg_counter += 1
logger.debug(f"Timestamp: {timestamp}, Values: {insert_data}") logger.debug(f"Timestamp: {timestamp}, Values: {insert_data}")
self.kafka_conn.publish_message(msg=insert_json) self.kafka_conn.publish_message(msg=insert_json)
msg_counter += 1 msg_counter += 1
...@@ -130,7 +137,8 @@ class ExcelUploadHandler: ...@@ -130,7 +137,8 @@ class ExcelUploadHandler:
insert_data: dict): insert_data: dict):
try: try:
date_key = [item for item in list(input_json.keys()) if item and item.startswith("date")] date_key = [item for item in list(input_json.keys()) if item and item.startswith("date")]
timestamp_value = self.convert_date_string_to_timestamp(input_data=date_key) date_key = date_key[0] if date_key else ""
timestamp_value = self.convert_date_string_to_timestamp(input_data=input_json[date_key]) if date_key else ''
if not timestamp_value and date_keys: if not timestamp_value and date_keys:
timestamp_value = data_parameters[key_mapping_json[data_parameters[0]]] timestamp_value = data_parameters[key_mapping_json[data_parameters[0]]]
timestamp_value = self.convert_date_string_to_timestamp(input_data=timestamp_value) timestamp_value = self.convert_date_string_to_timestamp(input_data=timestamp_value)
...@@ -140,6 +148,8 @@ class ExcelUploadHandler: ...@@ -140,6 +148,8 @@ class ExcelUploadHandler:
timestamp_str = str(timestamp_value) timestamp_str = str(timestamp_value)
if timestamp_value not in insert_data: if timestamp_value not in insert_data:
insert_data[timestamp_str] = {} insert_data[timestamp_str] = {}
if date_key:
input_json.pop(date_key, None)
insert_data[timestamp_str].update(input_json) insert_data[timestamp_str].update(input_json)
except Exception as e: except Exception as e:
logger.exception(f"Exception occurred while json creation {e.args}") logger.exception(f"Exception occurred while json creation {e.args}")
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment