Commit 93af06a4 authored by dasharatha.vamshi

init

parent c9307578
# Default ignored files
/shelf/
/workspace.xml
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.7 (welspun-defects) (2)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TestRunnerService">
<option name="PROJECT_TEST_RUNNER" value="pytest" />
</component>
</module>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyChainedComparisonsInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<option name="ignoreConstantInTheMiddle" value="true" />
</inspection_tool>
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredPackages">
<value>
<list size="55">
<item index="0" class="java.lang.String" itemvalue="scikit-image" />
<item index="1" class="java.lang.String" itemvalue="scipy" />
<item index="2" class="java.lang.String" itemvalue="opencv-python" />
<item index="3" class="java.lang.String" itemvalue="torch" />
<item index="4" class="java.lang.String" itemvalue="torchvision" />
<item index="5" class="java.lang.String" itemvalue="absl-py" />
<item index="6" class="java.lang.String" itemvalue="protobuf" />
<item index="7" class="java.lang.String" itemvalue="rsa" />
<item index="8" class="java.lang.String" itemvalue="tensorflow-estimator" />
<item index="9" class="java.lang.String" itemvalue="opt-einsum" />
<item index="10" class="java.lang.String" itemvalue="python-dateutil" />
<item index="11" class="java.lang.String" itemvalue="cycler" />
<item index="12" class="java.lang.String" itemvalue="gast" />
<item index="13" class="java.lang.String" itemvalue="numpy" />
<item index="14" class="java.lang.String" itemvalue="pyasn1" />
<item index="15" class="java.lang.String" itemvalue="requests" />
<item index="16" class="java.lang.String" itemvalue="importlib-metadata" />
<item index="17" class="java.lang.String" itemvalue="pyasn1-modules" />
<item index="18" class="java.lang.String" itemvalue="requests-oauthlib" />
<item index="19" class="java.lang.String" itemvalue="tensorflow" />
<item index="20" class="java.lang.String" itemvalue="tensorboard-plugin-wit" />
<item index="21" class="java.lang.String" itemvalue="zipp" />
<item index="22" class="java.lang.String" itemvalue="oauthlib" />
<item index="23" class="java.lang.String" itemvalue="astunparse" />
<item index="24" class="java.lang.String" itemvalue="urllib3" />
<item index="25" class="java.lang.String" itemvalue="pyparsing" />
<item index="26" class="java.lang.String" itemvalue="Cython" />
<item index="27" class="java.lang.String" itemvalue="Markdown" />
<item index="28" class="java.lang.String" itemvalue="google-auth-oauthlib" />
<item index="29" class="java.lang.String" itemvalue="Werkzeug" />
<item index="30" class="java.lang.String" itemvalue="kiwisolver" />
<item index="31" class="java.lang.String" itemvalue="tqdm" />
<item index="32" class="java.lang.String" itemvalue="yolov5processor" />
<item index="33" class="java.lang.String" itemvalue="tensorboard" />
<item index="34" class="java.lang.String" itemvalue="future" />
<item index="35" class="java.lang.String" itemvalue="matplotlib" />
<item index="36" class="java.lang.String" itemvalue="cachetools" />
<item index="37" class="java.lang.String" itemvalue="grpcio" />
<item index="38" class="java.lang.String" itemvalue="Keras" />
<item index="39" class="java.lang.String" itemvalue="google-auth" />
<item index="40" class="java.lang.String" itemvalue="idna" />
<item index="41" class="java.lang.String" itemvalue="Pillow" />
<item index="42" class="java.lang.String" itemvalue="cython" />
<item index="43" class="java.lang.String" itemvalue="keras" />
<item index="44" class="java.lang.String" itemvalue="imgaug" />
<item index="45" class="java.lang.String" itemvalue="opencv-contrib-python" />
<item index="46" class="java.lang.String" itemvalue="paho-mqtt" />
<item index="47" class="java.lang.String" itemvalue="pymongo" />
<item index="48" class="java.lang.String" itemvalue="fbprophet" />
<item index="49" class="java.lang.String" itemvalue="scikit-learn" />
<item index="50" class="java.lang.String" itemvalue="sklearn" />
<item index="51" class="java.lang.String" itemvalue="statsmodels" />
<item index="52" class="java.lang.String" itemvalue="python-dotenv" />
<item index="53" class="java.lang.String" itemvalue="pystan" />
<item index="54" class="java.lang.String" itemvalue="Cerberus" />
</list>
</value>
</option>
</inspection_tool>
<inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<option name="ignoredErrors">
<list>
<option value="N802" />
<option value="N806" />
<option value="N801" />
<option value="N803" />
</list>
</option>
</inspection_tool>
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="cv2.imresize" />
<option value="detectron2.model_zoo.get_config_file" />
<option value="detectron2.model_zoo.get_checkpoint_url" />
</list>
</option>
</inspection_tool>
</profile>
</component>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (welspun-defects) (2)" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/azure-file-upload.iml" filepath="$PROJECT_DIR$/.idea/azure-file-upload.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
FROM python:3.7-slim
WORKDIR /opt
COPY . /opt
RUN pip install -r requirements.txt
CMD ["python", "main.py"]
#---------------Service Configurations----------------#
SERVICE_CONFIG:
  LOG_LEVEL: info
  LOG_HANDLER_NAME: AzureBlobUpload
  LOGSTASH_HOST: 192.168.1.47
  LOGSTASH_PORT: 5000
#----------------------If read conf from mongo------------#
FOR_EACH_MONGO_CONFIG:
  READ_FROM_MONGO: true
  MONGO_URI: mongodb://192.168.0.210:27017
  MONGO_DB: iLensAiPipeline
  MONGO_RUN_COLL: runMetadata
  MONGO_SITE_COLL: siteMetadata
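For reference, a minimal sketch of how the keys above are consumed; it mirrors what scripts/common/config_parser.py already does and introduces no new behavior (the commented result values are illustrative):

# Minimal sketch mirroring scripts/common/config_parser.py.
import yaml

with open("conf/configuration.yml") as stream:
    _config = yaml.safe_load(stream)

log_level = _config.get("SERVICE_CONFIG", {}).get("LOG_LEVEL", "INFO").upper()  # -> "INFO"
logstash_host = _config.get("SERVICE_CONFIG", {}).get("LOGSTASH_HOST")          # -> "192.168.1.47"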
2021-03-01 14:52:31,294 INFO AzureBlobUpload Creating Connection........
2021-03-01 14:52:31,614 INFO AzureBlobUpload Started uploading file at path E:\iLens-AI\azure-blolb-upload\StandardScaler.pkl is uploaded at path azure_file_path on azure
2021-03-01 14:52:31,614 INFO AzureBlobUpload File Uploaded Successfully
2021-03-01 14:52:55,472 INFO AzureBlobUpload Creating Connection........
2021-03-01 14:52:55,738 INFO AzureBlobUpload Started uploading file at path E:\iLens-AI\azure-blolb-upload\StandardScaler.pkl is uploaded at path /data/model/tested/test.pkl on azure
2021-03-01 14:52:55,738 INFO AzureBlobUpload File Uploaded Successfully
import traceback

from azure.storage.blob import BlobServiceClient

from scripts.common.config_parser import config
from scripts.common.constants import AzureUploadBlobConstants, ComponentExceptions
from scripts.common.logsetup import logger


class AzureUpload:
    def __init__(self):
        self.container = AzureUploadBlobConstants.CONTAINER
        self.connection_string = AzureUploadBlobConstants.CONNECTION_STRING
        self.blob_service_client = BlobServiceClient.from_connection_string(self.connection_string)
        self.container_client = self.blob_service_client.get_container_client(self.container)

    def upload_to_blob(self, query):
        """Upload the file at `local_file_path` to the blob named by `azure_file_path`."""
        try:
            logger.info("Creating Connection........")
            blob_client = self.blob_service_client.get_blob_client(
                container=self.container,
                blob=query[AzureUploadBlobConstants.AZURE_FILE_PATH])
            with open(query[AzureUploadBlobConstants.LOCAL_FILE_PATH], "rb") as data:
                blob_client.upload_blob(data, overwrite=True)
            logger.info("Uploaded file from local path " + query[AzureUploadBlobConstants.LOCAL_FILE_PATH]
                        + " to Azure path " + query[AzureUploadBlobConstants.AZURE_FILE_PATH])
            return True
        except Exception:
            # Re-raise with the original traceback so the caller can log it.
            raise


if __name__ == '__main__':
    try:
        if AzureUploadBlobConstants.AZURE_FILE_PATH not in config:
            raise Exception(ComponentExceptions.INVALID_AZURE_FILE_PATH_EXCEPTION)
        if AzureUploadBlobConstants.LOCAL_FILE_PATH not in config:
            raise Exception(ComponentExceptions.INVALID_LOCAL_FILE_PATH_EXCEPTION)
        obj = AzureUpload()
        if obj.upload_to_blob(config):
            logger.info("File Uploaded Successfully")
    except Exception:
        logger.error("Web Component Failed")
        logger.error(traceback.format_exc())
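For clarity, a minimal driver sketch showing how the class above is called directly; it assumes conf/configuration.yml is present and that main.py sits at the repository root (as the Dockerfile's CMD suggests), and the paths used are illustrative only:

# Hedged driver sketch; the paths are illustrative, not part of the repository.
from main import AzureUpload

uploader = AzureUpload()
done = uploader.upload_to_blob({
    "azure_file_path": "/data/model/tested/test.pkl",  # destination blob inside the "ilensqa" container
    "local_file_path": "/tmp/StandardScaler.pkl",      # local file to upload
})
print("uploaded" if done else "failed")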
#!/usr/bin/env python
import os
import sys
import yaml
import json
from pymongo import MongoClient, DESCENDING
from scripts.common.constants import AzureUploadBlobConstants
config_path = os.path.join(os.getcwd(), "conf", "configuration.yml")
if os.path.exists(config_path):
    sys.stderr.write("Reading config from --> {}".format(config_path))
    sys.stderr.write("\n")
    with open(config_path, 'r') as stream:
        _config = yaml.safe_load(stream)
else:
    sys.stderr.write("Configuration not found...")
    sys.stderr.write("Exiting....")
    sys.exit(1)
# ----------------- Mongo -----------------------------------------------------------------------
# READ_FROM_MONGO = _config.get("FOR_EACH_MONGO_CONFIG", {}).get('READ_FROM_MONGO', False)
# COMPONENT_NAME = os.environ.get("type", RequestConstants.COMPONENT_NAME)
# pipeline_id = os.environ.get('PIPELINE_ID', default="pipeline_313")
#
# if READ_FROM_MONGO:
# MONGO_URI = os.environ.get("MONGO_URI", _config.get("FOR_EACH_MONGO_CONFIG", {}).get('MONGO_URI'))
# MONGO_DB = os.environ.get("MONGO_DB", _config.get("FOR_EACH_MONGO_CONFIG", {}).get('MONGO_DB'))
# MONGO_RUN_COLL = _config.get("FOR_EACH_MONGO_CONFIG", {}).get('MONGO_RUN_COLL')
# MONGO_SITE_COLL = _config.get("FOR_EACH_MONGO_CONFIG", {}).get('MONGO_SITE_COLL')
#
# db = MongoClient(MONGO_URI)[MONGO_DB]
# get_run_info = db[MONGO_RUN_COLL].find_one({}, sort=[("run_start_time", DESCENDING)])
#
# if get_run_info is None:
# raise Exception('No run info found')
#
# if not get_run_info['job_metadata']['in_progress']:
# raise Exception('No job in progress')
#
# _tag_hierarchy = get_run_info['job_metadata']['in_progress'][0]
# sys.stdout.write(f"_tag_hierarchy --> {_tag_hierarchy}\n")
# _tag_hierarchy = _tag_hierarchy.split('$')
# site_id = _tag_hierarchy[0]
# dept_id = _tag_hierarchy[1]
# line_id = _tag_hierarchy[2]
# equipment_id = _tag_hierarchy[3]
# tag_id = _tag_hierarchy[4]
#
# get_conf = db[MONGO_SITE_COLL].find_one({'site_id': site_id})
#
# dept_dict = list(filter(lambda x: x['dept_id'] == dept_id, get_conf['dept']))
# line_dict = list(filter(lambda x: x['line_id'] == line_id, dept_dict[0]['line']))
# equipment_dict = list(filter(lambda x: x['equipment_id'] == equipment_id, line_dict[0]['equipment']))
# tag_dict = list(filter(lambda x: x['tag_id'] == tag_id, equipment_dict[0]['tag']))
#
# if len(tag_dict) != 1:
# raise Exception(f"Tag details not found for hierarchy {_tag_hierarchy}")
# pipeline_conf = list(filter(lambda x: x['pipeline_id'] == pipeline_id, tag_dict[0]['pipeline_config']))
# if len(pipeline_conf) != 1:
# raise Exception(
# f"Tag details not found for hierarchy {'$'.join(_tag_hierarchy)} not found for pipeline {pipeline_id}")
# component_conf = list(
# filter(lambda x: x['component_type'] == COMPONENT_NAME, pipeline_conf[0]['component_config']))
#
# if len(component_conf) == 0:
# raise Exception(f"Configuration for component {COMPONENT_NAME} not found")
# component_conf = component_conf[0]['env_variables']
#
# for each_key, each_value in component_conf.items():
# if not isinstance(each_value, str):
# component_conf[each_key] = json.dumps(each_value)
#
# os.environ.update(component_conf)
# ---------------- END MONGO -----------------------------------------------------------------
BASE_LOG_PATH = os.path.join(os.getcwd(), "logs")
if not os.path.exists(BASE_LOG_PATH):
    os.mkdir(BASE_LOG_PATH)

LOG_LEVEL = os.environ.get("LOG_LEVEL", _config.get('SERVICE_CONFIG', {}).get("LOG_LEVEL", "INFO")).upper()
LOG_HANDLER_NAME = _config.get('SERVICE_CONFIG', {}).get("LOG_HANDLER_NAME", "AzureBlobUpload")
ENABLE_LOGSTASH_LOG = os.environ.get("ENABLE_LOGSTASH_LOG", 'False').lower()
LOGSTASH_HOST = _config.get('SERVICE_CONFIG', {}).get('LOGSTASH_HOST')
LOGSTASH_PORT = str(_config.get('SERVICE_CONFIG', {}).get('LOGSTASH_PORT'))

# Hard-coded paths for a local test run; these overwrite any values already set in the environment.
os.environ['azure_file_path'] = '/data/model/tested/test.pkl'
os.environ['local_file_path'] = r'E:\iLens-AI\azure-blolb-upload\StandardScaler.pkl'

config = {
    "azure_file_path": os.environ.get("azure_file_path"),
    "local_file_path": os.environ.get("local_file_path"),
}
# if not os.path.exists(config['shared_volume']):
# sys.stderr.write("Shared path does not exist!")
# sys.stderr.write("Creating path --> {}".format(config['shared_volume']))
# os.makedirs(config['shared_volume'])
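To make the override order explicit: environment variables take precedence over conf/configuration.yml, which takes precedence over the hard-coded default, exactly as the os.environ.get(...) chain above encodes. A small self-contained sketch with illustrative values:

# Precedence sketch for LOG_LEVEL (illustrative, self-contained).
import os

_config = {"SERVICE_CONFIG": {"LOG_LEVEL": "info"}}

os.environ.pop("LOG_LEVEL", None)
print(os.environ.get("LOG_LEVEL", _config.get("SERVICE_CONFIG", {}).get("LOG_LEVEL", "INFO")).upper())  # INFO (from YAML)

os.environ["LOG_LEVEL"] = "debug"
print(os.environ.get("LOG_LEVEL", _config.get("SERVICE_CONFIG", {}).get("LOG_LEVEL", "INFO")).upper())  # DEBUG (from env)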
#!/usr/bin/env python


class AzureUploadBlobConstants:
    CONTAINER = "ilensqa"
    CONNECTION_STRING = "DefaultEndpointsProtocol=https;AccountName=azrabsilensqa01;AccountKey=DN6q6kX98JM8yUwtuJh2bAaXUGFo1zRS5HJSsa/ZA+MlmctjC000eHP7bdXiQqkI/MVtADhS8c9E88LI5T4UHw==;EndpointSuffix=core.windows.net"
    COMPONENT_NAME = "AzureUpload"
    AZURE_FILE_PATH = "azure_file_path"
    LOCAL_FILE_PATH = "local_file_path"
    HTTP = "http://"
    LOG_VAR_MESSAGE = "\n" + "#" * 25 + "\n" + "{}" + "\n" + "#" * 25 + "\n" + "{}\n"


class ComponentExceptions:
    INVALID_AZURE_FILE_PATH_EXCEPTION = "AZURE PATH ERROR"
    INVALID_LOCAL_FILE_PATH_EXCEPTION = "No File in the local path"
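Note that the storage account key is committed in plain text above. A minimal alternative sketch, assuming the connection string is injected at deploy time through an AZURE_STORAGE_CONNECTION_STRING environment variable (that variable name, and AZURE_CONTAINER, are assumptions, not something this repository defines):

# Hedged sketch: read the secret from the environment instead of source control.
import os


class AzureUploadBlobConstants:
    CONTAINER = os.environ.get("AZURE_CONTAINER", "ilensqa")            # assumed variable name
    CONNECTION_STRING = os.environ["AZURE_STORAGE_CONNECTION_STRING"]   # assumed variable name
    COMPONENT_NAME = "AzureUpload"
    AZURE_FILE_PATH = "azure_file_path"
    LOCAL_FILE_PATH = "local_file_path"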
import os
import logging
from logging.handlers import RotatingFileHandler

from logstash_async.handler import AsynchronousLogstashHandler

from scripts.common.config_parser import (LOG_LEVEL, LOG_HANDLER_NAME, BASE_LOG_PATH, LOGSTASH_HOST, LOGSTASH_PORT,
                                           ENABLE_LOGSTASH_LOG)

DEFAULT_FORMAT = '%(asctime)s %(levelname)5s %(name)s %(message)s'
DEBUG_FORMAT = '%(asctime)s %(levelname)5s %(name)s [%(threadName)5s:%(filename)5s:%(funcName)5s():%(lineno)s] %(' \
               'message)s '
EXTRA = {}

FORMATTER = DEFAULT_FORMAT
if LOG_LEVEL.strip() == "DEBUG":
    FORMATTER = DEBUG_FORMAT

# Register a custom TRACE level one step below DEBUG.
logging.trace = logging.DEBUG - 5
logging.addLevelName(logging.trace, 'TRACE')


class ILensLogger(logging.getLoggerClass()):
    def __init__(self, name):
        super().__init__(name)

    def trace(self, msg, *args, **kwargs):
        if self.isEnabledFor(logging.trace):
            self._log(logging.trace, msg, args, **kwargs)


def get_logger(log_handler_name):
    """
    Purpose : To create the logger.
    :param log_handler_name: Name of the log handler.
    :return: logger object.
    """
    log_path = os.path.join(BASE_LOG_PATH, log_handler_name + ".log")
    logging.setLoggerClass(ILensLogger)
    _logger = logging.getLogger(log_handler_name)
    _logger.setLevel(LOG_LEVEL.strip().upper())

    formatter = logging.Formatter(FORMATTER)

    # Console handler
    log_handler = logging.StreamHandler()
    log_handler.setLevel(LOG_LEVEL)
    log_handler.setFormatter(formatter)

    # Rotating file handler (10 MB per file, 5 backups)
    handler = RotatingFileHandler(log_path, maxBytes=10485760, backupCount=5)
    handler.setFormatter(formatter)

    _logger.addHandler(log_handler)
    _logger.addHandler(handler)

    # Optional asynchronous Logstash handler, enabled via the ENABLE_LOGSTASH_LOG environment variable.
    if ENABLE_LOGSTASH_LOG == 'true' and LOGSTASH_PORT is not None and LOGSTASH_HOST is not None and LOGSTASH_PORT.isdigit():
        _logger.addHandler(AsynchronousLogstashHandler(LOGSTASH_HOST, int(LOGSTASH_PORT), database_path=None))
    return _logger


logger = get_logger(LOG_HANDLER_NAME)
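A minimal usage sketch of the logger above; the handler name comes from LOG_HANDLER_NAME in conf/configuration.yml, and the custom trace() call is only emitted when LOG_LEVEL is set to trace (the messages shown are illustrative):

# Usage sketch, assuming conf/configuration.yml is present so the import succeeds.
from scripts.common.logsetup import logger

logger.info("Creating Connection........")           # visible at the default info level
logger.debug("blob client configured")                # visible when LOG_LEVEL=debug
logger.trace("raw request payload: %s", {"k": "v"})   # visible only when LOG_LEVEL=trace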