Commit 301b249d authored by dasharatha.vamshi

cron-job

parent 7333a854
# Default ignored files
/shelf/
/workspace.xml
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
# Editor-based HTTP Client requests
/httpRequests/
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredPackages">
<value>
<list size="3">
<item index="0" class="java.lang.String" itemvalue="requests" />
<item index="1" class="java.lang.String" itemvalue="gunicorn" />
<item index="2" class="java.lang.String" itemvalue="pycrypto" />
</list>
</value>
</option>
</inspection_tool>
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="scripts.utils.dbconnections.Mongoutil.__getitem__" />
</list>
</option>
</inspection_tool>
</profile>
</component>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (2)" project-jdk-type="Python SDK" />
<component name="PyCharmProfessionalAdvertiser">
<option name="shown" value="true" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/mongo2sqlite-event-push.iml" filepath="$PROJECT_DIR$/.idea/mongo2sqlite-event-push.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.7 (2)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RSettings" path="C:\Program Files\R\R-3.4.3\bin\R.exe" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
FROM python:3.7-stretch
RUN apt-get update && apt-get install -y \
curl apt-utils apt-transport-https debconf-utils gcc build-essential gcc-6-test-results \
&& rm -rf /var/lib/apt/lists/*
RUN curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add -
RUN curl https://packages.microsoft.com/config/ubuntu/16.04/prod.list > /etc/apt/sources.list.d/mssql-release.list
# install libssl - required for sqlcmd to work on Ubuntu 18.04
RUN apt-get update && apt-get install -y libssl1.1 libssl-dev
# install SQL Server drivers
RUN apt-get update && ACCEPT_EULA=Y apt-get install -y msodbcsql17 unixodbc-dev
# install SQL Server tools
RUN apt-get update && ACCEPT_EULA=Y apt-get install -y mssql-tools
RUN echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bashrc
RUN /bin/bash -c "source ~/.bashrc"
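# NOTE: the "source ~/.bashrc" above only affects that single RUN layer; the PATH entry
# for /opt/mssql-tools/bin is picked up by interactive bash shells in the container,
# not by the CMD below.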
RUN pip install pyodbc
RUN apt-get update -y && \
apt-get install -y tzdata && \
rm -rf /var/lib/apt/lists/*
USER root
ADD . /app
WORKDIR /app
RUN pip install -r requirements.txt
RUN rm -rf /var/lib/apt/lists/* && rm -rf /root/.cache/pip/
CMD ["python", "app.py"]
\ No newline at end of file
import os
from collections import namedtuple
os.environ['config'] = '{"MONGO_URI": "mongodb://svc-ilens:svc2345@192.168.0.220:21017", "MONGO_DB":"ilens_wps", ' \
'"MONGO_COLL": "serviceConfiguration"} '
from scripts.pull_docs_mongo import PullEvents
from scripts.sqlite_ingestor import SQLiteHandler
from scripts.cron_job import CRON
from scripts.common.config import MONGO_DB_OBJ, MONGO_SERVICE_COLL, HOST_CONFIG
from datetime import datetime, timedelta
from scripts.common.logsetup import logger
get_in_out_dt_output = namedtuple("output", "dt_in_time dt_out_time punch_date")
def create_db_obj():
"""Creates and returns the database handler object,
also initialising the schema in the database.
Returns:
CRON: database handler object
"""
db_obj = CRON()
return db_obj
def insert_records(msserver_obj, data):
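# NOTE: the incoming data argument is ignored; a default configuration row is built here instead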
data = {
"job_run_date": datetime.now(),
"auto_update": 1,
"shift_hours": 8,
"group_event_period_seconds": 300,
"over_time_hours": 3,
"job_interval_minutes": 15
}
x = msserver_obj.insert_records(data)
print(x)
return x
def add(msserver_obj, date):
date = datetime.today()
msserver_obj.insertConfigTime(date)
def main():
"""main function
"""
msserver_obj = create_db_obj() # Vamshi
insert_records(msserver_obj, '')
if __name__ == '__main__':
main()
#
# msserver_obj = create_db_obj()
# # get_config(msserver_obj)
# # insert_records(msserver_obj, '')
# # msserver_obj.insertConfigTime()
-----BEGIN CERTIFICATE-----
MIIDmTCCAoGgAwIBAgIUB/peYGOoRh0ecGnLl7eL+xvs4B4wDQYJKoZIhvcNAQEL
BQAwfDELMAkGA1UEBhMCWFgxDDAKBgNVBAgMA04vQTEMMAoGA1UEBwwDTi9BMSAw
HgYDVQQKDBdTZWxmLXNpZ25lZCBjZXJ0aWZpY2F0ZTEvMC0GA1UEAwwmMTkyLjE2
OC4zLjIyMDogU2VsZi1zaWduZWQgY2VydGlmaWNhdGUwHhcNMjAwNjI1MTExNjE5
WhcNMjIwNjI1MTExNjE5WjB8MQswCQYDVQQGEwJYWDEMMAoGA1UECAwDTi9BMQww
CgYDVQQHDANOL0ExIDAeBgNVBAoMF1NlbGYtc2lnbmVkIGNlcnRpZmljYXRlMS8w
LQYDVQQDDCYxOTIuMTY4LjMuMjIwOiBTZWxmLXNpZ25lZCBjZXJ0aWZpY2F0ZTCC
ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK6HJziXCxI//jEaqcb0x2n7
MixLPhWzqtqdPOP71SQVdmOcB/rf+9pb56UvXSlNRqr7xHi2VZWpACY7Yu+bT3s8
oj1skdTSr3hXs3ADa/mukjxsBZl8KqO1y8WlwGc8QoUqSr/KrMKrF7Cv/6LC2InH
RgvUwVdRZhgDGtD+z3lQI3yVpwFKfh/Z6CESUdTimi3cxhKSSJ8aQiYD09Bn2dWu
3PZyMHSX0szfE7u7zmbW7wvSrvgsawNj2g3sHu8epTtdNAlEQQTnFyyYtP8xuZwg
1Scp5NP3ndEsBY2qf87AjDa771TAJUr4zoCM4wfOwoRZt7lS/iKD9IIp5tg39FkC
AwEAAaMTMBEwDwYDVR0RBAgwBocEwKgD3DANBgkqhkiG9w0BAQsFAAOCAQEAC2ty
QdvVJtnrbEut+S96esenQsHhp3G4AoP/a3lDOUA6Xa0sMDQEazOdKKQTfmvbAIIM
DESnjO/EY/RLR+k79RqSamk5bkdJbDMFkT/60gu8hPvlubR3Wu6U99Kt5Teld7nd
E/L2AAbDlyS4PfWQHm+7iYKBG3do5wFtrPmOSn4iOwp1FzYF5zPnBL8Yc7hWkfsW
+SbyZ+DDc9kLacEKA0sYN+aDbgNg9XvugBKw6te7iwP1vehXJnpiNqy7LbWhcRNu
b4rWqTJ3P0l7sXZs3V2iYBssCXsHSWWIufq9RTXthNdUK178Yl0lVSDIiHFa3kxT
EpfbY1CFRyZeLs3Wiw==
-----END CERTIFICATE-----
import sys
from datetime import datetime
from dateutil import parser  # required by licence_validator below (assumes python-dateutil is installed)
from pymongo import MongoClient
import json
import os
def licence_validator(payload):
try:
dt = parser.parse(payload['valid_till'])
now = datetime.now()
if (now > dt):
sys.stdout.write("Licence Expired \n".format())
sys.stdout.flush()
return False
return True
except KeyError as e:
sys.stderr.write("Error loading licence")
return False
def get_config_from_mongo(mongo_uri, dbname, basecollection,
key, value):
mongo = MongoClient(mongo_uri)
db = mongo[dbname]
config = db[basecollection].find_one({key: value}, {"_id": False})
return config
# MAIN_OS_VARIABLE = {
# "MONGO_URI": "mongodb://svc-ilens:svc2345@192.168.1.47:21017",
# "MONGO_DB": "ilens_wps",
# "MONGO_COLL": "serviceConfiguration"
# }
_raw_config = os.environ.get('config')
if _raw_config is None:
sys.stderr.write("Configuration not found...\n")
sys.stderr.write("Exiting....\n")
sys.exit(1)
MAIN_OS_VARIABLE = json.loads(_raw_config)
MONGO_URI = MAIN_OS_VARIABLE['MONGO_URI']
MONGO_SERVICE_DB = MAIN_OS_VARIABLE['MONGO_DB']
MONGO_SERVICE_COLL = MAIN_OS_VARIABLE['MONGO_COLL']
MONGO_DB_OBJ = MongoClient(MONGO_URI)[MONGO_SERVICE_DB]
HOST_CONFIG = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'hostConfig'}).get('config')
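# HOST_CONFIG is resolved once at import time; if the 'hostConfig' document is missing,
# find_one() returns None and this module fails to import.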
DATA_PATH = MAIN_OS_VARIABLE.get('dataPath', os.path.join(os.getcwd(), "data"))
sys.stderr.write("Loading data from {} \n".format(DATA_PATH))
LOG_LEVEL = MAIN_OS_VARIABLE.get("LOG_LEVEL", "DEBUG").upper()
LOG_HANDLER_NAME = MAIN_OS_VARIABLE.get("logHandlerName", "ilens-engine")
BASE_LOG_PATH = MAIN_OS_VARIABLE.get('baseLogPath', os.path.join(os.getcwd(), "logs"))
if not os.path.isdir(BASE_LOG_PATH):
os.mkdir(BASE_LOG_PATH)
if not os.path.isdir(DATA_PATH):
os.mkdir(DATA_PATH)
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from scripts.common.config import LOG_LEVEL, LOG_HANDLER_NAME, BASE_LOG_PATH
import logging
from logging.handlers import RotatingFileHandler
from logging import WARNING, INFO, DEBUG, ERROR
import os
DEFAULT_FORMAT = '%(asctime)s %(levelname)5s %(name)s %(message)s'
DEBUG_FORMAT = '%(asctime)s %(levelname)5s %(name)s [%(threadName)5s:%(filename)5s:%(funcName)5s():%(lineno)s] %(message)s'
EXTRA = {}
FORMATTER = DEFAULT_FORMAT
if LOG_LEVEL.strip() == "DEBUG":
FORMATTER = DEBUG_FORMAT
def get_logger(log_handler_name, extra=EXTRA):
"""
Purpose : To create logger .
:param log_handler_name: Name of the log handler.
:param extra: extra args for the logger
:return: logger object.
"""
log_path = os.path.join(BASE_LOG_PATH, log_handler_name + ".log")
logstash_temp = os.path.join(BASE_LOG_PATH, log_handler_name + ".db")
logger = logging.getLogger(log_handler_name)
logger.setLevel(LOG_LEVEL.strip().upper())
log_handler = logging.StreamHandler()
log_handler.setLevel(LOG_LEVEL)
formatter = logging.Formatter(FORMATTER)
log_handler.setFormatter(formatter)
handler = RotatingFileHandler(log_path, maxBytes=10485760,
backupCount=5)
handler.setFormatter(formatter)
logger.addHandler(log_handler)
logger.addHandler(handler)
logger = logging.LoggerAdapter(logger, extra)
return logger
logger = get_logger(LOG_HANDLER_NAME)
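# Other modules share this instance via: from scripts.common.logsetup import logger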
import os
import uuid
from datetime import datetime, date
import random
import sqlalchemy as sa
import json
from sqlalchemy import func, desc
from sqlalchemy import cast, Date
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from scripts.common.logsetup import logger
from scripts.common.config import MONGO_DB_OBJ, MONGO_SERVICE_COLL
from datetime import timedelta
Base = declarative_base()
MYSQL_CONFIG = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'msserver'}).get('config')
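# The SQL connection settings (uri, database, and the test_config table name) are read
# from the 'msserver' document in the serviceConfiguration collection at import time.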
class configurations(Base):
__tablename__ = MYSQL_CONFIG['test_config']
# id = sa.Column(sa.VARCHAR(10), nullable=False, primary_key=True)
id = sa.Column(sa.Integer, primary_key=True)
job_run_date = sa.Column(sa.DATETIME, nullable=False)
auto_update = sa.Column(sa.INT, nullable=False)
shift_hours = sa.Column(sa.INT, nullable=False)
group_event_period_seconds = sa.Column(sa.INT, nullable=False)
over_time_hours = sa.Column(sa.INT, nullable=False)
job_interval_minutes = sa.Column(sa.INT, nullable=False)
class CRON:
def __init__(self):
logger.info("starting sql engine")
self.MYSQL_CONFIG = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'msserver'}).get('config')
self.path = self.get_path()
self.session = self.db_connect()
self.final_dict = {}
self.config_list = []
self.config = {}
def get_path(self):
return "{}/{}".format(self.MYSQL_CONFIG['uri'], self.MYSQL_CONFIG['database'])
def db_connect(self):
logger.info("Creating db in {}".format(self.path))
engine = sa.create_engine(self.path)
Base.metadata.create_all(engine)
session = sessionmaker(engine)
return session()
def getConfig(self):
del_record = self.session.query(configurations).filter(
cast(configurations.job_run_date, Date) <= (date.today() - timedelta(days=7)))
del_record.delete(synchronize_session=False)
# self.session.delete(del_record)
self.session.commit()
records = self.session.query(configurations).filter(
configurations.id == self.session.query(func.max(configurations.id)))
# print(records)
for record in records:
# print(record)
self.config = record.__dict__
return self.config
def insert_records(self, data):
res = self.session.query(configurations).count()
print(res)
if res == 0:
self.session.add(self.insert(data))
self.session.commit()
else:
data = self.getConfig()
print(data)
if data['auto_update'] == 1 and (data['job_run_date'] + timedelta(minutes=data['job_interval_minutes'])) < datetime.now():
print(data['job_run_date'], data['job_interval_minutes'])
data['job_run_date'] = datetime.now()
self.session.add(self.insert(data))
self.session.commit()
return True
def insertConfigTime(self, job_date):
data = {
"job_run_date": job_date,
"auto_update": 1,
"shift_hours": 8,
"group_event_period_seconds": 300,
"over_time_hours": 3,
"job_interval_minutes": 15
}
self.session.add(self.insert(data))
self.session.commit()
@staticmethod
def insert(data):
return configurations(
job_run_date=data['job_run_date'],
auto_update=data['auto_update'],
shift_hours=data['shift_hours'],
group_event_period_seconds=data['group_event_period_seconds'],
over_time_hours=data['over_time_hours'],
job_interval_minutes=data['job_interval_minutes']
)
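# A minimal usage sketch (mirrors how app.py drives this class):
#   cron = CRON()
#   default_row = {"job_run_date": datetime.now(), "auto_update": 1, "shift_hours": 8,
#                  "group_event_period_seconds": 300, "over_time_hours": 3, "job_interval_minutes": 15}
#   cron.insert_records(default_row)  # seeds the table, or appends a fresh row once
#                                     # job_interval_minutes have elapsed and auto_update == 1
#   latest = cron.getConfig()         # dict of the most recent configuration row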
if __name__ == '__main__':
d = CRON()
# x = d.get_records()
# print(x)
import uuid
from datetime import datetime, timedelta
from pymongo import DESCENDING
from scripts.common.logsetup import logger
from scripts.common.config import MONGO_DB_OBJ, MONGO_SERVICE_COLL
class PullEvents:
def __init__(self):
APP_MONGO_COLLECTION = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'appMongoConfig'}).get('config')
# print(MONGO_DB_OBJ[APP_MONGO_COLLECTION])
self.event_logs_collection = MONGO_DB_OBJ[APP_MONGO_COLLECTION['eventLogCollection']]
self.employee_configuration = MONGO_DB_OBJ[APP_MONGO_COLLECTION['employeeConfiguration']]
self.testing_collection = MONGO_DB_OBJ[APP_MONGO_COLLECTION['testConfiguration']]
def fetch_timings(self, from_time, to_time, emp_id):
first_event = self.event_logs_collection.find_one({'timestamp': {"$gte": from_time, "$lt": to_time},
'eventMessage.Person ID': emp_id}, {'_id': 0, "frame": 0})
last_event = self.event_logs_collection.find_one({'timestamp': {"$gte": from_time, "$lt": to_time},
'eventMessage.Person ID': emp_id}, {'_id': 0, "frame": 0},
sort=[("timestamp", DESCENDING)])
employee_name = self.employee_configuration.find_one({'emp_id': emp_id}, {'_id': 0}).get('emp_name')
return first_event, last_event, employee_name
def get_attendance(self, from_time, to_time):
unique_emp_id = self.event_logs_collection.find(
{'timestamp': {"$gte": from_time, "$lt": to_time}}).distinct("eventMessage.Person ID")
payload = []
dates = [date for date in self.datetime_range(start=from_time, end=to_time)]
dates.append(to_time)
list_of_date_combs = list(zip(dates, dates[1:] + dates[:1]))
del list_of_date_combs[-1]
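# list_of_date_combs now holds consecutive (window_start, window_end) pairs covering
# from_time..to_time, roughly one per day (the last window ends at to_time);
# attendance is computed per person per window below.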
for each_day in list_of_date_combs:
for each_person in unique_emp_id:
try:
first_event, last_event, employee_name = self.fetch_timings(each_day[0], each_day[1], each_person)
payload.append({
"employee_name": employee_name,
"employee_id": each_person,
"login": first_event.get('timestamp'),
"logout": last_event.get('timestamp')
})
except Exception as e:
logger.error(e)
return payload
def create_event_records(self, emp_id, event_timestamps):
for i in event_timestamps:
data = {'id': str(uuid.uuid1()).split('-')[0], 'emp_id': emp_id, 'timestamp': i}
self.testing_collection.insert_one(data)  # Collection.insert() is deprecated in PyMongo 3 and removed in 4
return True
@staticmethod
def datetime_range(start=None, end=None):
span = end - start
for i in range(span.days + 1):
yield start + timedelta(days=i)
if __name__ == '__main__':
d = PullEvents().get_attendance(datetime(2020, 6, 26, 14, 00, 00, 776000),
datetime(2020, 6, 26, 15, 00, 00, 776000))
# empid = "2271"
# timestamps = [datetime.today(), datetime.today()]
# d = PullEvents().create_event_records(empid,timestamps)
for each in d:
print(each)