import os.path
import psycopg2
import pandas as pd
import os
import re
from pathlib import Path
import webbrowser
from sqlalchemy import create_engine
import io
import logging

# --- Postgres connection settings, overridable through the environment. ---
postgres_database = os.environ.get('postgres_database', default='jubilant')
postgre_user = os.environ.get('postgre_user', default='postgres')
# NOTE(review): hard-coded default credential — should be removed/rotated.
postgre_password = os.environ.get('postgre_password', default='root@123')
postgre_host = os.environ.get('postgre_host', default='localhost')
postgre_port = os.environ.get('postgre_port', default='5433')

# --- Job identity and the local staging area holding the CSVs to load. ---
job_id = os.environ.get('JOB_ID', 'job_1')
# Renamed from `dir`, which shadowed the `dir` builtin.
base_dir = os.environ.get('LOCAL_PATH', 'D:/Users/vamsikrishna/jubilant_1')

# Root directory for this job run: <base_dir>/<job_id>.
directory_path = os.path.join(base_dir, job_id)
directory = Path(directory_path)

class PushDataToPostgres:
    """Load the per-run CSV files from the local job directory into Postgres.

    Walks the extension-less (numbered) sub-directories under the module-level
    ``directory``, bulk-loads each CSV into Postgres via ``COPY FROM`` and
    marks the matching row in ``audit_table`` as ``Success``.
    """

    def push_postgres(self):
        """Push every CSV found under the job directory into Postgres.

        Connection parameters come from the module-level environment
        configuration.  Errors are logged rather than raised, preserving the
        original best-effort behaviour of this job.
        """
        engine_url = 'postgresql+psycopg2://{}:{}@{}:{}/{}'.format(
            postgre_user, postgre_password, postgre_host, postgre_port, postgres_database)
        engine = create_engine(engine_url)
        conn = None
        cur = None
        try:
            conn = psycopg2.connect(
                host=postgre_host,
                database=postgres_database,
                user=postgre_user,
                password=postgre_password,
                port=postgre_port)
            logging.info("Connected to postgres successfully")
            cur = conn.cursor()
            for subdir_name in self._numbered_subdirs():
                subdir = Path(os.path.join(directory, subdir_name))
                # BUG FIX: the original probed the folder with
                # webbrowser.open(path), which opened a file-browser window
                # per directory; a plain existence check is what was intended.
                if not os.path.isdir(subdir):
                    continue
                for csv_name in os.listdir(subdir):
                    if not csv_name.endswith('.csv'):
                        continue
                    # NOTE(review): this tests csv_name relative to the CWD
                    # (so it is almost always False and the file is processed),
                    # unlike the later check under ``directory``.  Preserved
                    # as-is — confirm the intended "already loaded" semantics.
                    if os.path.isfile(csv_name):
                        logging.info("File already exists")
                        continue
                    # BUG FIX: the original built the path with a hard-coded
                    # backslash (f"{path}\{files}"), broken on non-Windows.
                    self._ingest_csv(engine, csv_name, os.path.join(subdir, csv_name))
        except Exception as e:
            logging.error(e)
        finally:
            # BUG FIX: the original leaked the cursor and connection.
            if cur is not None:
                cur.close()
            if conn is not None:
                conn.close()

    @staticmethod
    def _numbered_subdirs():
        """Return extension-less entries of the job directory, sorted by the
        first integer embedded in each name."""
        names = [name for name in os.listdir(directory) if '.' not in name]
        names.sort(key=lambda name: int(re.findall(r'\d+', name)[0]))
        return names

    @staticmethod
    def _table_name(csv_name):
        """Derive the target table name: the underscore-separated pieces of
        the file name containing neither '-' nor '.', joined and lower-cased."""
        parts = [p for p in csv_name.split('_') if '-' not in p and '.' not in p]
        return '_'.join(parts).lower()

    @staticmethod
    def _audit_key(csv_name):
        """Build the audit_table key: first two underscore pieces lower-cased,
        followed by every hyphen-split fragment concatenated together."""
        pieces = csv_name.split('_')
        hyphen_fragments = []
        for piece in pieces:
            sub = piece.split('-')
            if len(sub) > 1:
                hyphen_fragments.extend(sub)
        return (pieces[0] + '_' + pieces[1]).lower() + '_' + ''.join(hyphen_fragments)

    def _ingest_csv(self, engine, csv_name, source_path):
        """Stage one CSV into the job directory, COPY it into Postgres and
        flag its audit_table row as Success.

        :param engine: SQLAlchemy engine bound to the target database.
        :param csv_name: bare file name (e.g. ``abc_def_2021-01-02.csv``).
        :param source_path: full path of the CSV inside its sub-directory.
        """
        staged_path = os.path.join(directory, csv_name)
        try:
            logging.info("Reading the file from local path to dataframe")
            # on_bad_lines='skip' replaces error_bad_lines=False, which was
            # removed in pandas 2.0.
            df = pd.read_csv(source_path, on_bad_lines='skip')
            df.to_csv(Path(staged_path), index=False)
        except Exception:
            # Best-effort staging as before, but no longer silently swallowed.
            logging.exception("Failed to stage %s", source_path)
        if not os.path.isfile(staged_path):
            return
        df = pd.read_csv(source_path, on_bad_lines='skip')
        # Convert every column whose space-split name contains 'Date'.
        for column in df.columns:
            if 'Date' in column.split(' '):
                df[column] = pd.to_datetime(df[column])
        table = self._table_name(csv_name)
        logging.info("Pushing the data to postgres")
        # Create the table (schema only) via pandas, then bulk-load with COPY,
        # which is far faster than row-by-row to_sql inserts.
        df.head(0).to_sql(table, engine, if_exists='append', index=False)
        raw_conn = engine.raw_connection()
        try:
            cur = raw_conn.cursor()
            buffer = io.StringIO()
            df.to_csv(buffer, sep='\t', header=False, index=False)
            buffer.seek(0)
            cur.copy_from(buffer, table, null="")
            raw_conn.commit()
            print('Success')
            self._mark_success(raw_conn, cur, self._audit_key(csv_name))
        finally:
            raw_conn.close()

    @staticmethod
    def _mark_success(raw_conn, cur, audit_key):
        """Set status='Success' on the audit_table row for *audit_key*, if present."""
        cur.execute("SELECT filename from audit_table")
        known = [row[0] for row in cur.fetchall()]
        if audit_key in known:
            # BUG FIX: the original passed audit_key as a %-format argument
            # with no placeholder in the message, which blew up inside logging.
            logging.info("Updating the status of the file %s", audit_key)
            # BUG FIX: parameterized query replaces str.format() interpolation,
            # closing a SQL-injection hole via the file name.
            cur.execute(
                "UPDATE audit_table SET status = 'Success' WHERE filename = %s;",
                (audit_key,))
            raw_conn.commit()


if __name__ == "__main__":
    # Guarded entry point: importing this module no longer kicks off a
    # database load as a side effect; running it as a script behaves as before.
    push_data_postgres = PushDataToPostgres()
    push_data_postgres.push_postgres()