Commit 301b249d authored by dasharatha.vamshi

cron-job

parent 7333a854
# Default ignored files
/shelf/
/workspace.xml
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
# Editor-based HTTP Client requests
/httpRequests/
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredPackages">
<value>
<list size="3">
<item index="0" class="java.lang.String" itemvalue="requests" />
<item index="1" class="java.lang.String" itemvalue="gunicorn" />
<item index="2" class="java.lang.String" itemvalue="pycrypto" />
</list>
</value>
</option>
</inspection_tool>
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="scripts.utils.dbconnections.Mongoutil.__getitem__" />
</list>
</option>
</inspection_tool>
</profile>
</component>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (2)" project-jdk-type="Python SDK" />
<component name="PyCharmProfessionalAdvertiser">
<option name="shown" value="true" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/mongo2sqlite-event-push.iml" filepath="$PROJECT_DIR$/.idea/mongo2sqlite-event-push.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.7 (2)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RSettings" path="C:\Program Files\R\R-3.4.3\bin\R.exe" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
FROM python:3.7-stretch
RUN apt-get update && apt-get install -y \
curl apt-utils apt-transport-https debconf-utils gcc build-essential gcc-6-test-results \
&& rm -rf /var/lib/apt/lists/*
RUN curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add -
RUN curl https://packages.microsoft.com/config/ubuntu/16.04/prod.list > /etc/apt/sources.list.d/mssql-release.list
# install libssl - required for sqlcmd to work on Ubuntu 18.04
RUN apt-get update && apt-get install -y libssl1.1 libssl-dev
# install SQL Server drivers
RUN apt-get update && ACCEPT_EULA=Y apt-get install -y msodbcsql17 unixodbc-dev
# install SQL Server tools
RUN apt-get update && ACCEPT_EULA=Y apt-get install -y mssql-tools
RUN echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bashrc
RUN /bin/bash -c "source ~/.bashrc"
RUN pip install pyodbc
RUN apt-get update -y && \
apt-get install -y tzdata && \
rm -rf /var/lib/apt/lists/*
USER root
ADD . /app
WORKDIR /app
RUN pip install -r requirements.txt
RUN rm -rf /var/lib/apt/lists/* && rm -rf /root/.cache/pip/
CMD ["python", "app.py"]
\ No newline at end of file
import os
from collections import namedtuple
os.environ[
'config'] = '{"MONGO_URI": "mongodb://svc-ilens:svc2345@192.168.0.220:21017", "MONGO_DB":"ilens_wps", ' \
'"MONGO_COLL": "serviceConfiguration"} '
from scripts.pull_docs_mongo import PullEvents
from scripts.sqlite_ingestor import SQLiteHandler
from scripts.common.config import MONGO_DB_OBJ, MONGO_SERVICE_COLL, HOST_CONFIG
from datetime import datetime, timedelta, date
from scripts.common.logsetup import logger
from scripts.sqlite_ingestor import *
get_in_out_dt_output = namedtuple("output", "dt_in_time dt_out_time punch_date")
def create_db_obj():
"""Creates and returns the database object
also initialise schemas in the data
Returns:
SQLiteHandler: Database object
"""
db_obj = SQLiteHandler()
return db_obj
def init_mongo():
"""Create and return mongo db object
Returns:
PullEvents: mongoDB interface
"""
pull_events = None
return pull_events
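# NOTE: hedged sketch, not part of the original commit. If init_mongo is meant to
# hand back the PullEvents interface imported above, the stub could simply become:
#     pull_events = PullEvents()
#     return pull_events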
def fetch_from_mongo(mongo_obj):
"""fetch events data from mongo DB
Args:
mongo_obj (PullEvents): interface to mongo db
Returns:
Dict: {
<employee_id>: {
"employee_name": employee_name,
"events":timestamps
}
}
"""
output = None
return output
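# NOTE: hedged sketch, not part of the original commit. One possible shape for
# fetch_from_mongo using the PullEvents interface imported above: reshape the
# get_attendance() records into the {employee_id: {...}} dict documented in the
# docstring. The one-day pull window below is an assumption for illustration only.
def fetch_from_mongo_sketch(mongo_obj):
    output = {}
    if mongo_obj is None:
        return output
    events = mongo_obj.get_attendance(from_time=datetime.now() - timedelta(days=1),
                                      to_time=datetime.now())
    for event in events:
        emp = output.setdefault(event["employee_id"],
                                {"employee_name": event["employee_name"], "events": []})
        # collect both the login and logout timestamps as raw punch events
        emp["events"].extend([event["login"], event["logout"]])
    return output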
def get_shift_window(msserver_obj, emp_id, punch_dates):
"""return a list of get_in_out_dt_output object
Args:
msserver_obj
punch_dates (list): list of datetime objects
Returns:
output (list): 'get_in_out_dt_output'
"""
emp_id = '51'
output = []
punch_dates = [date.today(),date.today()]
for punch_date in punch_dates:
val = msserver_obj.shift_window(emp_id, punch_date)
dt_in_time = str(val['InTime']).replace(':', '')[:4]
dt_out_time = str(val['OutTime']).replace(':', '')[:4]
if int(dt_out_time) - int(dt_in_time) > 0:
dt_in_time = datetime.strptime(str(punch_date).replace('-','') + dt_in_time,'%Y%m%d%H%M')
dt_out_time = datetime.strptime(str(punch_date).replace('-','') + dt_out_time,'%Y%m%d%H%M')
else:
out_punch_date = punch_date + timedelta(days=1)
dt_in_time = datetime.strptime(str(punch_date).replace('-', '') + dt_in_time, '%Y%m%d%H%M')
dt_out_time = datetime.strptime(str(out_punch_date).replace('-', '') + dt_out_time, '%Y%m%d%H%M')
# get_in_out_dt_output = namedtuple("output", "dt_in_time dt_out_time punch_date")
output.append(get_in_out_dt_output(dt_in_time,dt_out_time,punch_date))
print(output)
return output
# def get_shift_window(data, msserver_obj, punch_date):
# """Get shift timings for all the employees
# Args:
# data (Dict): output of `fetch_from_mongo` method
# msserver_obj (SQLiteHandler): interface to msserver
#
# Returns:
# Dict: {
# <employee_id>: {
# "employee_name": employee_name,
# "events":timestamps
# "shift_timings":{
# "in_time":<>,
# "out_time":<>
# }
# }
# """
# Result = {
#
# }
# data = {"50": {"employee_name": "vamshi", "events": "timestamps"}
# }
# # print(2271)
# punch_date = datetime.now().date()
# shift = {
# "shift_timings": {
# 'Date': datetime.today().date(),
# "in_time": '',
# "out_time": ''
# }
# }
# for key, vale in data.items():
# val = msserver_obj.shift_window(key, punch_date)
# # print(val)
# shift['shift_timings']['in_time'] = str(val['InTime']).replace(':', '')[:4]
# shift['shift_timings']['out_time'] = str(val['OutTime']).replace(':', '')[:4]
# # print(val)
# data[key]['shift_timings'] = shift['shift_timings']
# print(data)
# return data
def filter_records(data):
"""filter records
1. Remove all records that occur within X time period of each other (2 mins)
2. Flag the entry record (first) and the exit record (last)
3. Fill everything in between
Args:
data (Dict): Output of `get_shift_window` method
Returns:
Dict:{ <employee_id>: {
"employee_name": employee_name,
"events":timestamps
"shift_timings":{
"in_time":<>,
"out_time":<>
}
"F_Tbl_Intm":<>,
"F_Tbl_In1" :<>,
"F_Tbl_Out1" :<>,
"F_Tbl_In2" :<>,
"F_Tbl_Out2" :<>,
"F_Tbl_In3":<>,
"F_Tbl_Out3" :<>,
"F_Tbl_In4" :<>,
"F_Tbl_Out4" :<>,
"F_Tbl_Outtm":<>
}
}
"""
return data
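# NOTE: hedged sketch, not part of the original commit. One way to apply the rules
# in the docstring above, per employee: drop punches that fall within
# `window_seconds` of the previously kept punch, then flag the first kept punch as
# the entry (F_Tbl_Intm) and the last as the exit (F_Tbl_Outtm). The field names
# follow fetch_from_mongo's documented output; 120 seconds is the "2 mins" above.
def filter_records_sketch(data, window_seconds=120):
    if not data:
        return data
    for emp_id, record in data.items():
        events = sorted(record.get("events", []))
        kept = []
        for ts in events:
            # keep a punch only if it is far enough from the last kept punch
            if not kept or (ts - kept[-1]).total_seconds() > window_seconds:
                kept.append(ts)
        record["events"] = kept
        if kept:
            record["F_Tbl_Intm"] = kept[0]    # first punch of the day -> entry
            record["F_Tbl_Outtm"] = kept[-1]  # last punch of the day -> exit
    return data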
def insert_data(data, msserver_obj):
"""Insert data into MSserver
Args:
data (Dict): output from filter records
msserver_obj (SQLiteHandler): interface to MS SQL Server
"""
data = [{
"emp_id":"1",
"employee_name": "vamshi-1",
"events": "",
"shift_timings": {
"in_time": datetime.today(),
"out_time": datetime.today()
},
'update': True,
'insert': True,
'punch_date': date.today(),
'update_values': {
"F_Tbl_Intm": datetime.today(),
"F_Tbl_In1": datetime.today(),
"F_Tbl_Out1": datetime.today(),
"F_Tbl_In2": datetime.today(),
"F_Tbl_Out2": datetime.today(),
"F_Tbl_Int3": datetime.today(),
"F_Tbl_Out3": datetime.today(),
"F_Tbl_Int4": datetime.today(),
"F_Tbl_Out4": datetime.today(),
"F_Tbl_Outtm": datetime.today()
}
},
{
"emp_id": "2",
"employee_name": "vamshi",
"events": "",
"shift_timings": {
"in_time": datetime.today(),
"out_time": datetime.today()
},
'update': True,
'insert': True,
'punch_date': date.today(),
'update_values': {
"F_Tbl_Intm": datetime.today(),
"F_Tbl_In1": datetime.today(),
"F_Tbl_Out1": datetime.today(),
"F_Tbl_In2": datetime.today(),
"F_Tbl_Out2": datetime.today(),
"F_Tbl_Int3": datetime.today(),
"F_Tbl_Out3": datetime.today(),
"F_Tbl_Int4": datetime.today(),
"F_Tbl_Out4": datetime.today(),
"F_Tbl_Outtm": datetime.today()
}
}
]
print(date.today() - timedelta(days=1))
val = msserver_obj.add_to_db(data)
return None
def del_employee_for_punch_date(msserver_obj, id, punchTime):
found = False
punchTime = date.today()
id = "1"
found = msserver_obj.delpunchDateData(punchTime, id)
if found:
return "Deleted"
else:
return "No Record Found"
def indshift(msserver_obj):
row = list(range(1, 101))
print(row)
msserver_obj.insertindshift(row)
def get_config(msserver_obj):
"""gets the most recently inserted row in the configuration
Args:
msserver_obj (SQLiteHandler): db object
Returns:
dict : {
job_run_date :
auto_update :
shift_hours :
over_time_hours :
}
"""
config = msserver_obj.getConfig()
print(config)
return config
def get_data_for_punch_date(msserver_obj, id, punchTime):
"""[summary]
Args:
msserver_obj ([type]): [description]
id ([type]): [description]
punchTime ([type]): [description]
Returns: tuple: found(Boolean):(True if record found else False), row (Dict object if row found else None) True {
'_sa_instance_state': <sqlalchemy.orm.state.InstanceState object at 0x0000020994F1A488>, 'F_Tbl_dtpunched':
datetime.datetime(2020, 7, 11, 14, 50, 59), 'F_Tbl_In4': datetime.datetime(2020, 7, 11, 15, 41, 25),
'F_Tbl_CrBy': '', 'F_Tbl_Intm': datetime.datetime(2020, 7, 11, 15, 28, 2), 'F_Tbl_Out4': datetime.datetime(2020,
7, 11, 15, 41, 25), 'F_Tbl_CrDate': datetime.datetime(2020, 7, 11, 14, 50, 59), 'F_Tbl_In1': datetime.datetime(
2020, 7, 11, 15, 41, 25), 'F_Tbl_Outtm': datetime.datetime(2020, 7, 11, 15, 41, 25), 'F_Tbl_MoBy': 'MI',
'F_Tbl_Out1': datetime.datetime(2020, 7, 11, 15, 41, 25), 'F_Tbl_Sh_Code': None, 'F_Tbl_MoDate':
datetime.datetime(2020, 7, 11, 14, 50, 59), 'F_Tbl_row_id': 4137290254, 'F_Tbl_In2': datetime.datetime(2020, 7,
11, 15, 41, 25), 'F_Tbl_Verified': b'\x00', 'F_Tbl_SentToOracle': None, 'F_Tbl_Emp_Code': '2271', 'F_Tbl_Out2':
datetime.datetime(2020, 7, 11, 15, 41, 25), 'F_Tbl_WorkHours': None, 'F_Tbl_P_TIME': datetime.datetime(2020, 7,
10, 14, 50, 59), 'F_Tbl_In3': datetime.datetime(2020, 7, 11, 15, 41, 25), 'F_Tbl_ShiftHours': None, 'F_Tbl_Out3':
datetime.datetime(2020, 7, 11, 15, 41, 25)}
"""
found = False
punchTime = date.today()
id = "2271"
found, row = msserver_obj.getpunchDateData(id)
print(found, row)
return found, row
def main():
"""main function
"""
msserver_obj = create_db_obj() # Vamshi
mongo_obj = init_mongo() # Sid
# fetch data from mongo
data = fetch_from_mongo(mongo_obj) # Sid
# get shift window
data = get_shift_window(data, msserver_obj) # aggregating information #Vamshi
# filter records
data = filter_records(data) # Sid
# insert employee records
insert_data(data, msserver_obj) # Insert/Update #Vamshi
msserver_obj = create_db_obj()
get_config(msserver_obj)
# print(msserver_obj)
get_shift_window(msserver_obj,"",'')
# insert_data("", msserver_obj)
# get_data_for_punch_date(msserver_obj, "", "")
# mongo = PullEvents()
# indshift(msserver_obj)
empid = "1"
timestamps = [datetime.today(), datetime.today() + timedelta(days=1)]
# mongo.create_event_records(empid, timestamps)
# print(del_employee_for_punch_date(msserver_obj,'',''))
# data = get_shift_window(data, msserver_obj)
# if __name__ == '__main__':
# MYSQL_CONFIG = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'mysqlDB'}).get('config')
# pull_events = PullEvents()
# sql_h = SQLiteHandler()
# from_time = datetime.strptime(MYSQL_CONFIG['last_pull_from'], '%Y-%m-%d %H:%M:%S') + timedelta(hours=1)
# to_time = datetime.strptime(MYSQL_CONFIG['last_pull_to'], '%Y-%m-%d %H:%M:%S') + timedelta(hours=1)
# events = pull_events.get_attendance(
# from_time=from_time,
# to_time=to_time
# )
# logger.info("Collected {} Records".format(len(events)))
# for item in events:
# sql_h.add_event_to_db(item)
# # update the last pull time
# MONGO_DB_OBJ[MONGO_SERVICE_COLL].update_one({'configId': 'mysqlDB'}, {
# "$set": {"config.last_pull_from": from_time.strftime("%Y-%m-%d %H:%M:%S"),
# "config.last_pull_to": to_time.strftime("%Y-%m-%d %H:%M:%S")}})
#
# logger.info("Data load Complete..")
# logger.info("{}, {}".format(from_time, to_time))
import os
from collections import namedtuple
os.environ['config'] = '{"MONGO_URI": "mongodb://svc-ilens:svc2345@192.168.0.220:21017", "MONGO_DB":"ilens_wps", ' \
'"MONGO_COLL": "serviceConfiguration"} '
from scripts.pull_docs_mongo import PullEvents
from scripts.sqlite_ingestor import SQLiteHandler
from scripts.cron_job import CRON
from scripts.common.config import MONGO_DB_OBJ, MONGO_SERVICE_COLL, HOST_CONFIG
from datetime import datetime, timedelta
from scripts.common.logsetup import logger
from scripts.cron_job import *
get_in_out_dt_output = namedtuple("output", "dt_in_time dt_out_time punch_date")
def create_db_obj():
"""Creates and returns the database object
also initialise schemas in the data
Returns:
SQLiteHandler: Database object
"""
db_obj = CRON()
return db_obj
def insert_records(msserver_obj, data):
data = {
"job_run_date": datetime.now(),
"auto_update": 1,
"shift_hours": 8,
"group_event_period_seconds": 300,
"over_time_hours": 3,
"job_interval_minutes": 15
}
x = msserver_obj.insert_records(data)
print(x)
return x
def add(msserver_obj, date):
date = datetime.today()
msserver_obj.insertConfigTime(date)
def main():
"""main function
"""
msserver_obj = create_db_obj() # Vamshi
insert_records(msserver_obj, '')
if __name__ == '__main__':
main()
#
# msserver_obj = create_db_obj()
# # get_config(msserver_obj)
# # insert_records(msserver_obj, '')
# # msserver_obj.insertConfigTime()
-----BEGIN CERTIFICATE-----
MIIDmTCCAoGgAwIBAgIUB/peYGOoRh0ecGnLl7eL+xvs4B4wDQYJKoZIhvcNAQEL
BQAwfDELMAkGA1UEBhMCWFgxDDAKBgNVBAgMA04vQTEMMAoGA1UEBwwDTi9BMSAw
HgYDVQQKDBdTZWxmLXNpZ25lZCBjZXJ0aWZpY2F0ZTEvMC0GA1UEAwwmMTkyLjE2
OC4zLjIyMDogU2VsZi1zaWduZWQgY2VydGlmaWNhdGUwHhcNMjAwNjI1MTExNjE5
WhcNMjIwNjI1MTExNjE5WjB8MQswCQYDVQQGEwJYWDEMMAoGA1UECAwDTi9BMQww
CgYDVQQHDANOL0ExIDAeBgNVBAoMF1NlbGYtc2lnbmVkIGNlcnRpZmljYXRlMS8w
LQYDVQQDDCYxOTIuMTY4LjMuMjIwOiBTZWxmLXNpZ25lZCBjZXJ0aWZpY2F0ZTCC
ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK6HJziXCxI//jEaqcb0x2n7
MixLPhWzqtqdPOP71SQVdmOcB/rf+9pb56UvXSlNRqr7xHi2VZWpACY7Yu+bT3s8
oj1skdTSr3hXs3ADa/mukjxsBZl8KqO1y8WlwGc8QoUqSr/KrMKrF7Cv/6LC2InH
RgvUwVdRZhgDGtD+z3lQI3yVpwFKfh/Z6CESUdTimi3cxhKSSJ8aQiYD09Bn2dWu
3PZyMHSX0szfE7u7zmbW7wvSrvgsawNj2g3sHu8epTtdNAlEQQTnFyyYtP8xuZwg
1Scp5NP3ndEsBY2qf87AjDa771TAJUr4zoCM4wfOwoRZt7lS/iKD9IIp5tg39FkC
AwEAAaMTMBEwDwYDVR0RBAgwBocEwKgD3DANBgkqhkiG9w0BAQsFAAOCAQEAC2ty
QdvVJtnrbEut+S96esenQsHhp3G4AoP/a3lDOUA6Xa0sMDQEazOdKKQTfmvbAIIM
DESnjO/EY/RLR+k79RqSamk5bkdJbDMFkT/60gu8hPvlubR3Wu6U99Kt5Teld7nd
E/L2AAbDlyS4PfWQHm+7iYKBG3do5wFtrPmOSn4iOwp1FzYF5zPnBL8Yc7hWkfsW
+SbyZ+DDc9kLacEKA0sYN+aDbgNg9XvugBKw6te7iwP1vehXJnpiNqy7LbWhcRNu
b4rWqTJ3P0l7sXZs3V2iYBssCXsHSWWIufq9RTXthNdUK178Yl0lVSDIiHFa3kxT
EpfbY1CFRyZeLs3Wiw==
-----END CERTIFICATE-----
import sys
from datetime import datetime
from dateutil import parser
from pymongo import MongoClient
import json
import os
def licence_validator(payload):
try:
dt = parser.parse(payload['valid_till'])
now = datetime.now()
if now > dt:
sys.stdout.write("Licence Expired\n")
sys.stdout.flush()
return False
return True
except KeyError as e:
sys.stderr.write("Error loading licence")
return False
def get_config_from_mongo(mongo_uri, dbname, basecollection,
key, value):
mongo = MongoClient(mongo_uri)
db = mongo[dbname]
config = db[basecollection].find_one({key: value}, {"_id": False})
return config
# MAIN_OS_VARIABLE = {
# "MONGO_URI": "mongodb://svc-ilens:svc2345@192.168.1.47:21017",
# "MONGO_DB": "ilens_wps",
# "MONGO_COLL": "serviceConfiguration"
# }
RAW_CONFIG = os.environ.get('config')
if RAW_CONFIG is None:
sys.stderr.write("Configuration not found...\n")
sys.stderr.write("Exiting....\n")
sys.exit(1)
MAIN_OS_VARIABLE = json.loads(RAW_CONFIG)
MONGO_URI = MAIN_OS_VARIABLE['MONGO_URI']
MONGO_SERVICE_DB = MAIN_OS_VARIABLE['MONGO_DB']
MONGO_SERVICE_COLL = MAIN_OS_VARIABLE['MONGO_COLL']
MONGO_DB_OBJ = MongoClient(MONGO_URI)[MONGO_SERVICE_DB]
HOST_CONFIG = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'hostConfig'}).get('config')
DATA_PATH = MAIN_OS_VARIABLE.get('dataPath', os.path.join(os.getcwd(), "data"))
sys.stderr.write("Loading data from {} \n".format(DATA_PATH))
LOG_LEVEL = MAIN_OS_VARIABLE.get("LOG_LEVEL", "DEBUG").upper()
LOG_HANDLER_NAME = MAIN_OS_VARIABLE.get("logHandlerName", "ilens-engine")
BASE_LOG_PATH = MAIN_OS_VARIABLE.get('baseLogPath', os.path.join(os.getcwd(), "logs"))
if not os.path.isdir(BASE_LOG_PATH):
os.mkdir(BASE_LOG_PATH)
if not os.path.isdir(DATA_PATH):
os.mkdir(DATA_PATH)
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from scripts.common.config import LOG_LEVEL, LOG_HANDLER_NAME, BASE_LOG_PATH
import logging
from logging.handlers import RotatingFileHandler
from logging import WARNING,INFO,DEBUG,ERROR
import os
DEFAULT_FORMAT = '%(asctime)s %(levelname)5s %(name)s %(message)s'
DEBUG_FORMAT = '%(asctime)s %(levelname)5s %(name)s [%(threadName)5s:%(filename)5s:%(funcName)5s():%(lineno)s] %(message)s'
EXTRA = {}
FORMATTER = DEFAULT_FORMAT
if LOG_LEVEL.strip() == "DEBUG":
FORMATTER = DEBUG_FORMAT
def get_logger(log_handler_name, extra=EXTRA):
"""
Purpose : To create logger .
:param log_handler_name: Name of the log handler.
:param extra: extra args for the logger
:return: logger object.
"""
log_path = os.path.join(BASE_LOG_PATH, log_handler_name + ".log")
logstash_temp = os.path.join(BASE_LOG_PATH, log_handler_name + ".db")
logger = logging.getLogger(log_handler_name)
logger.setLevel(LOG_LEVEL.strip().upper())
log_handler = logging.StreamHandler()
log_handler.setLevel(LOG_LEVEL)
formatter = logging.Formatter(FORMATTER)
log_handler.setFormatter(formatter)
handler = RotatingFileHandler(log_path, maxBytes=10485760,
backupCount=5)
handler.setFormatter(formatter)
logger.addHandler(log_handler)
logger.addHandler(handler)
logger = logging.LoggerAdapter(logger, extra)
return logger
logger = get_logger(LOG_HANDLER_NAME)
import os
import uuid
from datetime import datetime, date
import random
import sqlalchemy as sa, json
from sqlalchemy import func, desc
from sqlalchemy import cast, Date
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from scripts.common.logsetup import logger
from scripts.common.config import MONGO_DB_OBJ, MONGO_SERVICE_COLL
from datetime import timedelta
Base = declarative_base()
MYSQL_CONFIG = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'msserver'}).get('config')
class configurations(Base):
__tablename__ = MYSQL_CONFIG['test_config']
# id = sa.Column(sa.VARCHAR(10), nullable=False, primary_key=True)
id = sa.Column(sa.Integer, primary_key=True)
job_run_date = sa.Column(sa.DATETIME, nullable=False)
auto_update = sa.Column(sa.INT, nullable=False)
shift_hours = sa.Column(sa.INT, nullable=False)
group_event_period_seconds = sa.Column(sa.INT, nullable=False)
over_time_hours = sa.Column(sa.INT, nullable=False)
job_interval_minutes = sa.Column(sa.INT, nullable=False)
class CRON:
def __init__(self):
logger.info("starting sql engine")
self.MYSQL_CONFIG = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'msserver'}).get('config')
self.path = self.get_path()
self.session = self.db_connect()
self.final_dict = {}
self.config_list = []
self.config = {}
def get_path(self):
return "{}/{}".format(self.MYSQL_CONFIG['uri'], self.MYSQL_CONFIG['database'])
def db_connect(self):
logger.info("Creating db in {}".format(self.path))
engine = sa.create_engine(self.path)
Base.metadata.create_all(engine)
session = sessionmaker(engine)
return session()
def getConfig(self):
del_record = self.session.query(configurations).filter(
cast(configurations.job_run_date, Date) <= (date.today() - timedelta(days=7)))
del_record.delete(synchronize_session=False)
# self.session.delete(del_record)
self.session.commit()
records = self.session.query(configurations).filter(
configurations.id == self.session.query(func.max(configurations.id)))
# print(records)
for record in records:
# print(record)
self.config = record.__dict__
return self.config
def insert_records(self, data):
res = self.session.query(configurations).count()
print(res)
if res == 0:
self.session.add(self.insert(data))
self.session.commit()
else:
data = self.getConfig()
print(data)
if data['auto_update'] == 1 and (data['job_run_date'] + timedelta(minutes=data['job_interval_minutes'])) < datetime.now():
print(data['job_run_date'], data['job_interval_minutes'])
data['job_run_date'] = datetime.now()
self.session.add(self.insert(data))
self.session.commit()
return True
def insertConfigTime(self, job_date):
data = {
"job_run_date": job_date,
"auto_update": 1,
"shift_hours": 8,
"group_event_period_seconds": 300,
"over_time_hours": 3,
"job_interval_minutes": 15
}
self.session.add(self.insert(data))
self.session.commit()
@staticmethod
def insert(data):
return configurations(
job_run_date=data['job_run_date'],
auto_update=data['auto_update'],
shift_hours=data['shift_hours'],
group_event_period_seconds=data['group_event_period_seconds'],
over_time_hours=data['over_time_hours'],
job_interval_minutes=data['job_interval_minutes']
)
if __name__ == '__main__':
d = CRON()
# x = d.get_records()
# print(x)
import uuid
from datetime import datetime, timedelta
from pymongo import DESCENDING
from scripts.common.logsetup import logger
from scripts.common.config import MONGO_DB_OBJ, MONGO_SERVICE_COLL
class PullEvents:
def __init__(self):
APP_MONGO_COLLECTION = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'appMongoConfig'}).get('config')
# print(MONGO_DB_OBJ[APP_MONGO_COLLECTION])
self.event_logs_collection = MONGO_DB_OBJ[APP_MONGO_COLLECTION['eventLogCollection']]
self.employee_configuration = MONGO_DB_OBJ[APP_MONGO_COLLECTION['employeeConfiguration']]
self.testing_collection = MONGO_DB_OBJ[APP_MONGO_COLLECTION['testConfiguration']]
def fetch_timings(self, from_time, to_time, emp_id):
first_event = self.event_logs_collection.find_one({'timestamp': {"$gte": from_time, "$lt": to_time},
'eventMessage.Person ID': emp_id}, {'_id': 0, "frame": 0})
last_event = self.event_logs_collection.find_one({'timestamp': {"$gte": from_time, "$lt": to_time},
'eventMessage.Person ID': emp_id}, {'_id': 0, "frame": 0},
sort=[("timestamp", DESCENDING)])
employee_name = self.employee_configuration.find_one({'emp_id': emp_id}, {'_id': 0}).get('emp_name')
return first_event, last_event, employee_name
def get_attendance(self, from_time, to_time):
unique_emp_id = [each_person for each_person in
self.event_logs_collection.find({'timestamp': {"$gte": from_time, "$lt": to_time}}).distinct(
"eventMessage.Person ID")]
payload = []
dates = [date for date in self.datetime_range(start=from_time, end=to_time)]
dates.append(to_time)
list_of_date_combs = list(zip(dates, dates[1:] + dates[:1]))
del list_of_date_combs[-1]
for each_day in list_of_date_combs:
for each_person in unique_emp_id:
try:
first_event, last_event, employee_name = self.fetch_timings(each_day[0], each_day[1], each_person)
payload.append({
"employee_name": employee_name,
"employee_id": each_person,
"login": first_event.get('timestamp'),
"logout": last_event.get('timestamp')
})
except Exception as e:
logger.error(e)
return payload
def create_event_records(self, emp_id, event_timestamps):
for i in event_timestamps:
data = {'id': str(uuid.uuid1()).split('-')[0], 'emp_id': emp_id, 'timestamp': i}
self.testing_collection.insert_one(data)
return True
@staticmethod
def datetime_range(start=None, end=None):
span = end - start
for i in range(span.days + 1):
yield start + timedelta(days=i)
if __name__ == '__main__':
d = PullEvents().get_attendance(datetime(2020, 6, 26, 14, 00, 00, 776000),
datetime(2020, 6, 26, 15, 00, 00, 776000))
# empid = "2271"
# timestamps = [datetime.today(), datetime.today()]
# d = PullEvents().create_event_records(empid,timestamps)
for each in d:
print(each)
import uuid
from datetime import datetime, date
import random
import sqlalchemy as sa, json
from sqlalchemy import func, desc
from sqlalchemy import cast, Date
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from scripts.common.logsetup import logger
from scripts.common.config import MONGO_DB_OBJ, MONGO_SERVICE_COLL
Base = declarative_base()
MYSQL_CONFIG = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'msserver'}).get('config')
class configurations(Base):
__tablename__ = MYSQL_CONFIG['config_table']
id = sa.Column(sa.VARCHAR(10), nullable=False, primary_key=True)
job_run_date = sa.Column(sa.DATETIME, nullable=False)
auto_update = sa.Column(sa.BINARY, nullable=False)
shift_hours = sa.Column(sa.INT, nullable=False)
group_event_period_seconds = sa.Column(sa.INT, nullable=False)
over_time_hours = sa.Column(sa.INT, nullable=True)
class shift_time(Base):
__tablename__ = MYSQL_CONFIG['shift_time']
ShiftType = sa.Column(sa.VARCHAR(8), nullable=False, primary_key=True)
ShiftPattern = sa.Column(sa.VARCHAR(8), nullable=False, primary_key=True)
InTime = sa.Column(sa.VARCHAR(5), nullable=True)
OutTime = sa.Column(sa.VARCHAR(5), nullable=True)
RestDayFix = sa.Column(sa.VARCHAR(1), nullable=True)
RestDay1 = sa.Column(sa.VARCHAR(12), nullable=True)
RestDay2 = sa.Column(sa.VARCHAR(12), nullable=True)
Category = sa.Column(sa.VARCHAR(5), nullable=True)
Remarks = sa.Column(sa.VARCHAR(50), nullable=True)
class Tra_attendance(Base):
__tablename__ = MYSQL_CONFIG['tra_attendance']
F_Tbl_row_id = sa.Column(sa.BIGINT, primary_key=True)
F_Tbl_Emp_Code = sa.Column(sa.NVARCHAR(10), nullable=False)
F_Tbl_P_TIME = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_dtpunched = sa.Column(sa.DATETIME, nullable=False)
F_Tbl_Intm = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_In1 = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_Out1 = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_In2 = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_Out2 = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_Int3 = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_Out3 = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_Int4 = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_Out4 = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_Outtm = sa.Column(sa.DATETIME, nullable=True)
F_Tbl_Sh_Code = sa.Column(sa.NVARCHAR(10), nullable=True)
F_Tbl_Verified = sa.Column(sa.BINARY, nullable=True)
F_Tbl_WorkHours = sa.Column(sa.INT, nullable=True)
F_Tbl_ShiftHours = sa.Column(sa.INT, nullable=True)
F_Tbl_CrBy = sa.Column(sa.NVARCHAR(10), nullable=False)
F_Tbl_CrDate = sa.Column(sa.DATETIME, nullable=False)
F_Tbl_MoBy = sa.Column(sa.NVARCHAR(10), nullable=False)
F_Tbl_MoDate = sa.Column(sa.DATETIME, nullable=False)
F_Tbl_SentToOracle = sa.Column(sa.BINARY, nullable=True)
class ShiftRoaster(Base):
__tablename__ = MYSQL_CONFIG['shiftRoaster']
# id = sa.Column(sa.BIGINT, nullable=False, default=lambda: random.getrandbits(32), primary_key=True)
MonthName = sa.Column(sa.VARCHAR(10), nullable=False, primary_key=True)
ShiftType = sa.Column(sa.VARCHAR(10), nullable=False, primary_key=True)
ShiftPattern = sa.Column(sa.VARCHAR(10), nullable=True)
Day1 = sa.Column(sa.CHAR(3), nullable=True)
Day2 = sa.Column(sa.CHAR(3), nullable=True)
Day3 = sa.Column(sa.CHAR(3), nullable=True)
Day4 = sa.Column(sa.CHAR(3), nullable=True)
Day5 = sa.Column(sa.CHAR(3), nullable=True)
Day6 = sa.Column(sa.CHAR(3), nullable=True)
Day7 = sa.Column(sa.CHAR(3), nullable=True)
Day8 = sa.Column(sa.CHAR(3), nullable=True)
Day9 = sa.Column(sa.CHAR(3), nullable=True)
Day10 = sa.Column(sa.CHAR(3), nullable=True)
Day11 = sa.Column(sa.CHAR(3), nullable=True)
Day12 = sa.Column(sa.CHAR(3), nullable=True)
Day13 = sa.Column(sa.CHAR(3), nullable=True)
Day14 = sa.Column(sa.CHAR(3), nullable=True)
Day15 = sa.Column(sa.CHAR(3), nullable=True)
Day16 = sa.Column(sa.CHAR(3), nullable=True)
Day17 = sa.Column(sa.CHAR(3), nullable=True)
Day18 = sa.Column(sa.CHAR(3), nullable=True)
Day19 = sa.Column(sa.CHAR(3), nullable=True)
Day20 = sa.Column(sa.CHAR(3), nullable=True)
Day21 = sa.Column(sa.CHAR(3), nullable=True)
Day22 = sa.Column(sa.CHAR(3), nullable=True)
Day23 = sa.Column(sa.CHAR(3), nullable=True)
Day24 = sa.Column(sa.CHAR(3), nullable=True)
Day25 = sa.Column(sa.CHAR(3), nullable=True)
Day26 = sa.Column(sa.CHAR(3), nullable=True)
Day27 = sa.Column(sa.CHAR(3), nullable=True)
Day28 = sa.Column(sa.CHAR(3), nullable=True)
Day29 = sa.Column(sa.CHAR(3), nullable=True)
Day30 = sa.Column(sa.CHAR(3), nullable=True)
Day31 = sa.Column(sa.CHAR(3), nullable=True)
# i = 1
# x = 'Date'
# while i < 32:
# # print(i)
# name = x + str(i)
# name = sa.Column(sa.CHAR, length=3, nullable=True)
# i += 1
class IndShiftRoaster(Base):
__tablename__ = MYSQL_CONFIG['IndShiftRoaster']
# id = sa.Column(sa.BIGINT, nullable=False, default=lambda: random.getrandbits(32), primary_key=True)
EmpID = sa.Column(sa.VARCHAR(10), nullable=False, primary_key=True)
MonthName = sa.Column(sa.VARCHAR(10), nullable=False, primary_key=True)
ShiftType = sa.Column(sa.VARCHAR(10), nullable=True)
ShiftPattern = sa.Column(sa.VARCHAR(10), nullable=True)
Day1 = sa.Column(sa.CHAR(6), nullable=True)
Day2 = sa.Column(sa.CHAR(6), nullable=True)
Day3 = sa.Column(sa.CHAR(6), nullable=True)
Day4 = sa.Column(sa.CHAR(6), nullable=True)
Day5 = sa.Column(sa.CHAR(6), nullable=True)
Day6 = sa.Column(sa.CHAR(6), nullable=True)
Day7 = sa.Column(sa.CHAR(6), nullable=True)
Day8 = sa.Column(sa.CHAR(6), nullable=True)
Day9 = sa.Column(sa.CHAR(6), nullable=True)
Day10 = sa.Column(sa.CHAR(6), nullable=True)
Day11 = sa.Column(sa.CHAR(6), nullable=True)
Day12 = sa.Column(sa.CHAR(6), nullable=True)
Day13 = sa.Column(sa.CHAR(6), nullable=True)
Day14 = sa.Column(sa.CHAR(6), nullable=True)
Day15 = sa.Column(sa.CHAR(6), nullable=True)
Day16 = sa.Column(sa.CHAR(6), nullable=True)
Day17 = sa.Column(sa.CHAR(6), nullable=True)
Day18 = sa.Column(sa.CHAR(6), nullable=True)
Day19 = sa.Column(sa.CHAR(6), nullable=True)
Day20 = sa.Column(sa.CHAR(6), nullable=True)
Day21 = sa.Column(sa.CHAR(6), nullable=True)
Day22 = sa.Column(sa.CHAR(6), nullable=True)
Day23 = sa.Column(sa.CHAR(6), nullable=True)
Day24 = sa.Column(sa.CHAR(6), nullable=True)
Day25 = sa.Column(sa.CHAR(6), nullable=True)
Day26 = sa.Column(sa.CHAR(6), nullable=True)
Day27 = sa.Column(sa.CHAR(6), nullable=True)
Day28 = sa.Column(sa.CHAR(6), nullable=True)
Day29 = sa.Column(sa.CHAR(6), nullable=True)
Day30 = sa.Column(sa.CHAR(6), nullable=True)
Day31 = sa.Column(sa.CHAR(6), nullable=True)
class Event(Base):
__tablename__ = MYSQL_CONFIG['table']
id = sa.Column(sa.Integer, primary_key=True)
employee_name = sa.Column(sa.Text)
employee_id = sa.Column(sa.Text)
login = sa.Column(sa.DateTime)
logout = sa.Column(sa.DateTime)
class SQLiteHandler:
def __init__(self):
logger.info("starting sql engine")
self.MYSQL_CONFIG = MONGO_DB_OBJ[MONGO_SERVICE_COLL].find_one({'configId': 'msserver'}).get('config')
self.path = self.get_path()
self.session = self.db_connect()
self.final_dict = {}
self.final_dict2 = {}
self.final_dict3 = {}
self.update_dict = {}
self.punch_dict = {}
self.punch_list = []
self.config = {}
def get_path(self):
return "{}/{}".format(self.MYSQL_CONFIG['uri'], self.MYSQL_CONFIG['database'])
def db_connect(self):
logger.info("Creating db in {}".format(self.path))
engine = sa.create_engine(self.path)
Base.metadata.create_all(engine)
session = sessionmaker(engine)
return session()
def add_event_to_db(self, data):
logger.info("Adding event data...")
res = self.session.query(Event).filter(Event.employee_id == data['employee_id']).filter(
func.DATE(Event.login) == data['login'].date()).filter(func.DATE(Event.logout) == data['logout'].date())
is_present = False
for each in res:
is_present = True
datas = self.session.query(Event).filter(Event.employee_id == data['employee_id']).filter(
func.DATE(Event.login) == data['login'].date()).filter(
func.DATE(Event.logout) == data['logout'].date())[0]
logger.info("Updating {}, {}, {}, {} with {}, {}, {}, {}".format(datas.employee_id, datas.employee_name,
datas.login, datas.logout,
data['employee_id'], data['employee_name'],
data['login'], data['logout']))
self.session.query(Event).filter(Event.employee_id == data['employee_id']).filter(
func.DATE(Event.login) == data['login'].date()).filter(
func.DATE(Event.logout) == data['logout'].date()).update({"logout": data['logout']},
synchronize_session=False)
self.session.commit()
if not is_present:
logger.info("User not present, adding now: {}".format(data['employee_name']))
self.session.add(self.add_event(data))
self.session.commit()
def add_to_db(self, data1):
for data in data1:
try:
# print('----', func.DATE(Tra_attendance.F_Tbl_dtpunched))
# print(date.today())
res = self.session.query(Tra_attendance).filter(Tra_attendance.F_Tbl_Emp_Code == data['emp_id']).filter(
cast(Tra_attendance.F_Tbl_dtpunched, Date) == data['punch_date']).count()
print(res)
if res:
# res1 = self.session.query(Tra_attendance).filter(Tra_attendance.F_Tbl_Emp_Code == key).filter(
# func.DATE(Tra_attendance.F_Tbl_dtpunched) == date.today())
# for record in res1:
# self.update_dict = record.__dict__
# print(self.update_dict)
logger.info("User present, updating now: {}".format(data['emp_id']))
update_dict = data['update_values']
self.session.query(Tra_attendance).filter(
Tra_attendance.F_Tbl_Emp_Code == data['emp_id']).filter(
cast(Tra_attendance.F_Tbl_dtpunched, Date) == data['punch_date']).update(
update_dict,
synchronize_session=False)
self.session.commit()
else:
logger.info("User not present, adding now: {}".format(data['emp_id']))
self.session.add(self.row_to_be_inserted(data, data['emp_id']))
self.session.commit()
except Exception:
self.session.rollback()
logger.error(f"Failed for {data['emp_id']} id", exc_info=True)
def get_events(self, from_time, to_time):
db_records = self.session.query(Event.employee_id, Event.employee_name, Event.login, Event.logout).filter(
Event.login > from_time, Event.logout < to_time)
payload = []
for each_event in db_records:
payload.append({
"employee_id": each_event[0],
"employee_name": each_event[1],
"login": each_event[2],
"logout": each_event[3]
})
return payload
def getpunchDateData(self, id):
subqry = self.session.query(func.max(Tra_attendance.F_Tbl_dtpunched)).filter(
Tra_attendance.F_Tbl_Emp_Code == id)
records = self.session.query(Tra_attendance).filter(Tra_attendance.F_Tbl_Emp_Code == id,
Tra_attendance.F_Tbl_dtpunched == subqry)
# records = self.session.query(Tra_attendance).filter(Tra_attendance.F_Tbl_Emp_Code == id).order_by(
# desc(Tra_attendance.F_Tbl_dtpunched).limit(1))
count = self.session.query(Tra_attendance).filter(Tra_attendance.F_Tbl_Emp_Code == id,
Tra_attendance.F_Tbl_dtpunched == subqry).count()
print(count)
if count:
logger.info("found match with id and punch date: {}".format(id))
status = True
for each in records:
self.punch_dict = each.__dict__
# print(self.punch_dict)
# print(len(self.punch_list))
return status, self.punch_dict
else:
status = False
return status, None
def delpunchDateData(self, punch_date, id):
count = self.session.query(Tra_attendance).filter(Tra_attendance.F_Tbl_Emp_Code == id).filter(
cast(Tra_attendance.F_Tbl_dtpunched, Date) == punch_date).count()
print(count)
if count:
logger.info("found match with id and punch date deleting it: {}, {}".format(id, punch_date))
status = True
del_record = self.session.query(Tra_attendance).filter(Tra_attendance.F_Tbl_Emp_Code == id).filter(
cast(Tra_attendance.F_Tbl_dtpunched, Date) == punch_date).one()
self.session.delete(del_record)
self.session.commit()
return status
else:
return False
def getConfig(self):
records = self.session.query(configurations).filter(
configurations.id == self.session.query(func.max(configurations.id)))
# print(records)
for record in records:
# print(record)
self.config = record.__dict__
return self.config
def insertindshift(self, row):
for i in row:
if int(i) < 51:
self.session.add(self.indshiftinsert(i, 'GS'))
self.session.commit()
else:
self.session.add(self.indshiftinsert(i, 'SHB'))
self.session.commit()
def shift_window(self, emp_id, punch_date):
# print(datetime.now().date())
column = 'Day' + str(punch_date.day)  # day-of-month without zero padding, to match the Day1..Day31 columns
month = punch_date.strftime("%b")
year = punch_date.strftime("%y")
print(month.upper() + year)
monthname = month.upper() + year
# print(column)
# db_records = self.session.query(IndShiftRoaster).filter(IndShiftRoaster.EmpID == emp_id)
records = self.session.query(IndShiftRoaster).filter(IndShiftRoaster.EmpID == emp_id,
IndShiftRoaster.MonthName == monthname)
for record in records:
self.final_dict = record.__dict__
# print(record.__dict__)
shiftType = self.final_dict[column]
# shiftType = shiftType.lstrip()
# print(shiftType)
records2 = self.session.query(ShiftRoaster).filter(ShiftRoaster.ShiftType == shiftType)
for record in records2:
self.final_dict2 = record.__dict__
# print(record.__dict__)
roasterType = self.final_dict2[column]
# print(roasterType)
shift = shiftType.strip()
roaster = roasterType.strip()
print(shift)
records3 = self.session.query(shift_time).filter(shift_time.ShiftType == shift,
shift_time.ShiftPattern == roaster)
for record in records3:
self.final_dict3 = record.__dict__
print(record.__dict__)
# self.punch_list.append(self.final_dict3)
# in_time = final_dict3['']
return self.final_dict3
@staticmethod
def add_event(data):
return Event(
employee_name=data['employee_name'],
employee_id=data['employee_id'],
login=data['login'],
logout=data['logout'],
)
@staticmethod
def indshiftinsert(i, type):
return IndShiftRoaster(
EmpID=str(i),
MonthName='JUL20',
ShiftType=type,
ShiftPattern=type,
Day1=type,
Day2=type,
Day3=type,
Day4=type,
Day5=type,
Day6=type,
Day7=type,
Day8=type,
Day9=type,
Day10=type,
Day11=type,
Day12=type,
Day13=type,
Day14=type,
Day15=type,
Day16=type,
Day17=type,
Day18=type,
Day19=type,
Day20=type,
Day21=type,
Day22=type,
Day23=type,
Day24=type,
Day25=type,
Day26=type,
Day27=type,
Day28=type,
Day29=type,
Day30=type,
Day31=type
)
@staticmethod
def row_to_be_inserted(data, key):
return Tra_attendance(
F_Tbl_Emp_Code=data['emp_id'],
F_Tbl_P_TIME=data['update_values'].get('F_Tbl_Intm', None),
F_Tbl_dtpunched=data['punch_date'],
F_Tbl_Intm=data['update_values'].get('F_Tbl_Intm', None),
F_Tbl_In1=data['update_values'].get('F_Tbl_In1', None),
F_Tbl_Out1=data['update_values'].get('F_Tbl_Out1', None),
F_Tbl_In2=data['update_values'].get('F_Tbl_In2', None),
F_Tbl_Out2=data['update_values'].get('F_Tbl_Out2', None),
F_Tbl_Int3=data['update_values'].get('F_Tbl_Int3', None),
F_Tbl_Out3=data['update_values'].get('F_Tbl_Out3', None),
F_Tbl_Int4=data['update_values'].get('F_Tbl_Int4', None),
F_Tbl_Out4=data['update_values'].get('F_Tbl_Out4', None),
F_Tbl_Outtm=data['update_values'].get('F_Tbl_Outtm', None),
# F_Tbl_Sh_Code=data[''],
# F_Tbl_Verified=,
# F_Tbl_WorkHours=data[''],
# F_Tbl_ShiftHours=data[''],
F_Tbl_CrBy='',
F_Tbl_CrDate=datetime.today(),
F_Tbl_MoBy='MI',
F_Tbl_MoDate=datetime.today(),
# F_Tbl_SentToOracle=,
)
if __name__ == '__main__':
d = SQLiteHandler()
d.add_event_to_db(
{
"employee_name": "sajadsd",
"employee_id": "4343",
"login": datetime.now(),
'logout': datetime.now()
}
)
print(d.get_events(datetime(2020, 6, 25, 10, 00, 49, 776000), datetime(2020, 6, 25, 11, 23, 49, 776000)))