Commit 88460769 authored by arun.uday's avatar arun.uday

migrate to gitlab-pm

parents
# Default ignored files
/shelf/
/workspace.xml
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (task9senderredis)" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/task9senderredis.iml" filepath="$PROJECT_DIR$/.idea/task9senderredis.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
# using mongo aggregation operations
import uvicorn
from scripts.config.applications_config import uvicorn_port
from scripts.logging.loggers import logger
# starting the application
if __name__ == "__main__":
    try:
        print("Redis task")
        # run the FastAPI app declared in main.py on the configured port
        uvicorn.run("main:app", port=int(uvicorn_port))
    except Exception as e:
        # use a %s placeholder: the original passed `e` as a bare extra
        # argument, which logging cannot interpolate, so the error text
        # never reached the log record
        logger.error("Interruption occurred: %s", e)
# pip install python-multipart
[path]
base_path = scripts/
sub_path = temp/
log_path = log/
[file]
file_name_json = data.json
[mqtt]
topic = receptionist
mqtt_host = 192.168.0.220
port = 1883
requests = 60
[redis_db]
redis_host = 127.0.0.1
db_queue_doctor = 0
db_patient_detail = 1
[api_path]
no_doctors = /no_doctors/
close_day = /close_day/
patient_details = /patient_details/
[uvicorn]
uvicorn_port = 8000
[log]
formatter_time = asctime
formatter_level = levelname
# using mongo aggregation operations
import uvicorn
from fastapi import FastAPI
from scripts.config.applications_config import uvicorn_port
from scripts.logging.loggers import logger
from scripts.services.app_services_run import assign_doctor
app = FastAPI()
# mount the doctor-assignment routes defined in app_services_run
app.include_router(assign_doctor)

# starting the application
if __name__ == "__main__":
    try:
        print("Redis task")
        uvicorn.run(app, port=int(uvicorn_port))
    except Exception as e:
        # %s placeholder so the exception text is actually rendered
        # (logging uses lazy %-formatting for its extra arguments)
        logger.error("Interruption occurred: %s", e)
# pip install python-multipart
# reading conf file
import configparser

config = configparser.RawConfigParser()
config.read("conf/applications.conf")

# [path] section: filesystem pieces used to build the log directory
base_path, sub_path, log_path = (
    config.get("path", key) for key in ("base_path", "sub_path", "log_path")
)

# [uvicorn] section: HTTP port (kept as a str here; callers cast to int)
uvicorn_port = config.get("uvicorn", "uvicorn_port")

# [file] section: temp JSON file name and its assembled full path
file_name_json = config.get("file", "file_name_json")
full_path_json = base_path + sub_path + file_name_json

# [mqtt] section: broker coordinates and publish topic
topic_name, mqtt_host, port, request_no = (
    config.get("mqtt", key) for key in ("topic", "mqtt_host", "port", "requests")
)

# [redis_db] section: redis host plus the two logical db indexes
redis_host, db_queue_doctor, db_patient_detail = (
    config.get("redis_db", key)
    for key in ("redis_host", "db_queue_doctor", "db_patient_detail")
)

# [log] section: record attribute names consumed by the log formatter
formatter_time, formatter_level = (
    config.get("log", key) for key in ("formatter_time", "formatter_level")
)
# reading conf file
import configparser

config = configparser.RawConfigParser()
config.read("conf/applications.conf")

# [api_path] section: route strings mounted on the FastAPI router
doctors_count, close_day, patient_details = (
    config.get("api_path", key)
    for key in ("no_doctors", "close_day", "patient_details")
)
# model for the patient
from pydantic import BaseModel


# patient model
class Patient(BaseModel):
    """Request body schema for the patient_details endpoint.

    Flattened to a plain dict downstream and stored as a redis hash
    keyed by patient_id.
    """
    # unique id; also used as the redis hash name for this patient
    patient_id: str
    # free-text complaint/description supplied with the request
    description: str
    age: int
    name: str
from scripts.core.handlers.next_doc_existing import assign_doc_exist
from scripts.core.handlers.queue_check import set_patient_queue
from scripts.logging.loggers import logger
# check if the patient exists in the redis db
def patient_data_check(conn_queue, conn_patient, client, dict_data, doctors_counter):
    """Route a patient to the round-robin queue (new patient) or straight
    to their previously assigned doctor (returning patient).

    :param conn_queue: redis connection for the doctor-queue db
    :param conn_patient: redis connection for the patient-detail db
    :param client: connected MQTT client used to publish the assignment
    :param dict_data: patient record dict; must contain 'patient_id'
    :param doctors_counter: number of doctors registered for the day
    """
    try:
        if not conn_patient.exists(dict_data['patient_id']):
            # new patient: read the round-robin cursor (redis returns bytes)
            next_doc = int(conn_queue.get('next_doc').decode())
            set_patient_queue(conn_queue, conn_patient, client, dict_data, next_doc, doctors_counter)
        else:
            # returning patient: reuse the doctor stored on their hash
            assign_doc_exist(conn_queue, conn_patient, client, dict_data)
    except Exception as e:
        # %s placeholder so the exception text reaches the log record
        logger.error("Exception occurred while checking for patients: %s", e)
from scripts.logging.loggers import logger
def delete_queue(doctors_counter, conn_queue):
    """Tear down the day's per-doctor queue keys in redis.

    For each existing doctor{i} key: delete it, reset the round-robin
    cursor to 0 and drop the "Doctors" counter key.

    :param doctors_counter: number of doctor keys created for the day
    :param conn_queue: redis connection for the doctor-queue db
    """
    try:
        for doctors in range(doctors_counter):
            if not conn_queue.exists(f'doctor{doctors}'):
                print(doctors)
            else:
                conn_queue.delete(f'doctor{doctors}')
                conn_queue.set('next_doc', 0)
                conn_queue.delete("Doctors")
                # NOTE(review): this only decrements the local copy; the
                # caller's module-level counter is unaffected — confirm intent
                doctors_counter -= 1
    except Exception as e:
        # %s placeholder so the exception text reaches the log record
        logger.error("Deleting queue error: %s", e)
from scripts.logging.loggers import logger
def set_no_doctors(conn_queue, no_doctors):
    """Register the day's doctor count and create one empty queue key
    per doctor.

    :param conn_queue: redis connection for the doctor-queue db
    :param no_doctors: number of doctors available today
    :return: no_doctors on success; None if an exception was logged
    """
    try:
        conn_queue.set("Doctors", no_doctors)
        for doctors in range(no_doctors):
            # only create missing keys so an existing (possibly busy)
            # doctor slot is not cleared
            if not conn_queue.exists(f'doctor{doctors}'):
                conn_queue.set(f'doctor{doctors}', '')
        return no_doctors
    except Exception as e:
        # %s placeholder so the exception text reaches the log record
        logger.error("Exception occurred while adding doctors: %s", e)
from paho.mqtt.client import Client
from scripts.core.handlers.check_patient import patient_data_check
from scripts.core.handlers.generating_patient_data import generate_patients
from scripts.logging.loggers import logger
def generate_patients_publish(mqtt_host, port, conn_queue, conn_patient, doctors_counter, patient_data_val):
    """Connect to the MQTT broker and route one patient through the queue.

    :param mqtt_host: broker hostname from the conf file
    :param port: broker port (str from conf; cast to int here)
    :param conn_queue: redis connection for the doctor-queue db
    :param conn_patient: redis connection for the patient-detail db
    :param doctors_counter: number of doctors registered for the day
    :param patient_data_val: Patient model instance to process
    """
    try:
        # fresh MQTT client per request; used to publish the assignment
        client = Client()
        client.connect(mqtt_host, int(port))
        # flatten the pydantic model into a plain dict
        dict_data = generate_patients(patient_data_val)
        # initialise the round-robin cursor on first use
        if not conn_queue.exists('next_doc'):
            conn_queue.set('next_doc', 0)
        patient_data_check(conn_queue, conn_patient, client, dict_data, doctors_counter)
    except Exception as e:
        # %s placeholder so the exception text reaches the log record
        logger.error("Exception occurred while generating and publishing: %s", e)
# generating the patient details
from scripts.logging.loggers import logger
def generate_patients(model_data):
    """Convert a patient payload (pydantic model or any dict()-compatible
    value) into a plain dict.

    :param model_data: object accepted by dict(), e.g. a Patient model
    :return: dict form of the payload, or None if conversion fails
        (the failure is logged)
    """
    try:
        return dict(model_data)
    except Exception as e:
        # %s placeholder so the exception text reaches the log record
        logger.error("Exception occurred while generating patients: %s", e)
# assigning the doctors to new patients
import time
from scripts.logging.loggers import logger
def assign_new_doctor(conn_queue, conn_patient, client, dict_data, next_doc):
    """Assign doctor `next_doc` to a new patient and publish the assignment.

    :param conn_queue: redis connection for the doctor-queue db
    :param conn_patient: redis connection for the patient-detail db
    :param client: connected MQTT client
    :param dict_data: patient record dict; mutated to include "doctor"
    :param next_doc: index of the doctor to assign
    """
    try:
        avi_doc = conn_queue.get(f'doctor{next_doc}')
        # a non-empty queue value means the doctor is still busy
        if avi_doc != b'':
            print("Wait for the doctor to be free...")
            # NOTE(review): fixed 3s wait then unconditional clear assumes
            # the doctor frees up in time — confirm intent
            time.sleep(3)
            conn_queue.set(f'doctor{next_doc}', '')
        # record the assignment on the patient's record
        dict_data.update({"doctor": f'doctor{next_doc}'})
        # NOTE(review): hmset is deprecated in redis-py; consider
        # hset(name, mapping=...) once the client version allows it
        conn_patient.hmset(dict_data['patient_id'], dict_data)
        patient_id = dict_data['patient_id']
        # notify the consumer listening on this doctor's topic
        client.publish(f'doctor{next_doc}', patient_id)
    except Exception as e:
        # %s placeholder so the exception text reaches the log record
        logger.error("Exception occurred while assigning new doctor: %s", e)
# assigning existing patients to the doctors
import time
from scripts.logging.loggers import logger
def assign_doc_exist(conn_queue, conn_patient, client, dict_data):
    """Re-publish a returning patient to the doctor already stored on
    their redis hash.

    :param conn_queue: redis connection for the doctor-queue db
    :param conn_patient: redis connection for the patient-detail db
    :param client: connected MQTT client
    :param dict_data: patient record dict; must contain 'patient_id'
    """
    try:
        # the doctor id was stored on the patient hash at first assignment
        doctor = conn_patient.hmget(dict_data['patient_id'], 'doctor')
        avi_doc = conn_queue.get(doctor[0].decode())
        # a non-empty queue value means the doctor is still busy
        if avi_doc != b'':
            print("Wait for the doctor to be free...")
            # NOTE(review): fixed 3s wait then unconditional clear assumes
            # the doctor frees up in time — confirm intent
            time.sleep(3)
            conn_queue.set(doctor[0].decode(), '')
        patient_id = dict_data['patient_id']
        # notify the consumer listening on this doctor's topic
        client.publish(doctor[0].decode(), patient_id)
    except Exception as e:
        # %s placeholder so the exception text reaches the log record
        logger.error("Exception occurred while assigning existing patient: %s", e)
from scripts.core.handlers.next_doc_assign import assign_new_doctor
from scripts.logging.loggers import logger
# creating a round-robin for the new patient for the doctor
def set_patient_queue(conn_queue, conn_patient, client, dict_data, next_doc, doctors_counter):
    """Advance the round-robin cursor and assign the next doctor to a
    new patient.

    :param conn_queue: redis connection for the doctor-queue db
    :param conn_patient: redis connection for the patient-detail db
    :param client: connected MQTT client
    :param dict_data: patient record dict
    :param next_doc: current round-robin cursor value
    :param doctors_counter: number of doctors registered for the day
    """
    try:
        # cursor still inside the doctor range: assign and advance
        if int(next_doc) != doctors_counter:
            assign_new_doctor(conn_queue, conn_patient, client, dict_data, next_doc)
            conn_queue.set('next_doc', next_doc + 1)
        else:
            # cursor wrapped past the last doctor: reset all queues and
            # start the rotation again at doctor 0
            conn_queue.set('next_doc', 0)
            for doctors in range(doctors_counter):
                conn_queue.set(f'doctor{doctors}', '')
            assign_new_doctor(conn_queue, conn_patient, client, dict_data, 0)
            conn_queue.set('next_doc', 1)
    except Exception as e:
        # %s placeholder so the exception text reaches the log record
        logger.error("Exception occurred while checking the queue: %s", e)
# redis db connection
import redis
from scripts.config import applications_config

# db for the doctors queue (index from [redis_db] db_queue_doctor)
# NOTE(review): the db index comes out of the conf file as a str —
# confirm the redis client accepts it / cast to int if needed
conn_queue = redis.Redis(applications_config.redis_host, db=applications_config.db_queue_doctor)
# db for the patient details (index from [redis_db] db_patient_detail)
conn_patient = redis.Redis(applications_config.redis_host, db=applications_config.db_patient_detail)
import logging
import os
from logging.handlers import RotatingFileHandler
from scripts.config import applications_config
from scripts.config.applications_config import formatter_time, formatter_level
def get_logger():
    """Configure and return the root logger with a rotating file handler.

    The log directory (base_path + sub_path + log_path from the conf
    file) is created on demand; the log file is named after the MQTT
    topic.
    """
    __logger__ = logging.getLogger('')
    __logger__.setLevel(logging.INFO)
    log_formatter = f'%({formatter_time})s - %({formatter_level})-6s - %(message)s'
    time_format = "%Y-%m-%d %H:%M:%S"
    file_path = applications_config.base_path + applications_config.sub_path + applications_config.log_path
    formatter = logging.Formatter(log_formatter, time_format)
    # exist_ok avoids the race between an exists() check and makedirs()
    os.makedirs(file_path, exist_ok=True)
    log_file = os.path.join(file_path, f"{applications_config.topic_name}.log")
    # the original used maxBytes=1 with no backupCount, which truncated
    # the log on virtually every record; rotate at 1 MB and keep backups
    temp_handler = RotatingFileHandler(log_file, maxBytes=1_000_000, backupCount=3)
    temp_handler.setFormatter(formatter)
    __logger__.addHandler(temp_handler)
    return __logger__


# module-level singleton imported by the rest of the project
logger = get_logger()
from fastapi import APIRouter
from scripts.constants import api_path_config
from scripts.config.applications_config import mqtt_host, port
from scripts.core.engine.models.patient_model import Patient
from scripts.core.handlers.deleting_doctor_queue import delete_queue
from scripts.core.handlers.doctors_number import set_no_doctors
from scripts.core.handlers.generate_publish import generate_patients_publish
from scripts.database.redis_db import conn_queue, conn_patient
# count the number of doctors for the day
# (module-level mutable state, updated by the endpoints below)
doctors_counter = 0
# router mounted on the FastAPI app in main.py
assign_doctor = APIRouter()
# generating the doctors for the day
@assign_doctor.get(api_path_config.doctors_count)
def get_doctors(no_doctors: int):
    """Create the day's doctor queues; `no_doctors` arrives as a query
    parameter and is added to the running module-level counter."""
    global doctors_counter
    try:
        doctors_counter += set_no_doctors(conn_queue, no_doctors)
    except Exception as e:
        print(e)
        return None
    return "Doctors queue created"
# closing the doctors for the day
@assign_doctor.post(api_path_config.close_day)
def close_doctors():
    """Delete the day's doctor queues.

    Renamed from `get_doctors`, which shadowed the GET handler above
    (the registered route is unchanged — FastAPI binds it at decoration
    time).
    """
    try:
        # NOTE(review): doctors_counter is only read here and never reset
        # after deletion — confirm whether it should be zeroed
        delete_queue(doctors_counter, conn_queue)
    except Exception as e:
        print(e)
    else:
        return "Doctors deleted"
# producing the patient details for the consumer
@assign_doctor.post(api_path_config.patient_details)
def patient_data(body: Patient):
    """Accept a patient payload and publish a doctor assignment over MQTT.

    :param body: validated Patient model parsed from the request body
    """
    try:
        # `body` is already a validated Patient instance; rebuilding it
        # field by field (as the original did) added nothing
        generate_patients_publish(mqtt_host, port, conn_queue, conn_patient, doctors_counter, body)
    except Exception as e:
        print(e)
    else:
        return "Doctor assigned patient"
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment