Commit 8a0ee94d authored by dasharatha.vamshi

sonarlint changes

parent 83131f6a
import traceback
from scripts.common.config_parser import *
from scripts.common.config_parser import config
from scripts.common.constants import AddtoModelStoreConstants, ComponentExceptions
from scripts.common.logsetup import logger
from sklearn.ensemble import RandomForestRegressor
@@ -9,6 +9,7 @@ import os.path
from os import path
from datetime import date
import uuid
import json
class AddtoModelStore:
@@ -24,7 +25,7 @@ class AddtoModelStore:
self.artifact_archived_path = self.query['artifact_archived_path']
self.new_model_path = os.path.join(self.component_input_dir, os.listdir(self.component_input_dir)[0])
self.new_model_name = os.listdir(self.component_input_dir)[0]
self.file_rename = self.new_model_name.split('.')[0] + "_" +str(uuid.uuid4()).split('-')[0] + "." + \
self.file_rename = self.new_model_name.split('.')[0] + "_" + str(uuid.uuid4()).split('-')[0] + "." + \
self.new_model_name.split('.')[1]
os.rename(self.new_model_path, os.path.join(self.component_input_dir, self.file_rename))
self.new_model_path = os.path.join(self.component_input_dir, os.listdir(self.component_input_dir)[0])
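
A note on the rename above: the constructor makes the incoming model file unique by splicing the first block of a uuid4 in front of the extension. A minimal standalone sketch of that step, with a hypothetical directory and file name; os.path.splitext is used so names containing extra dots stay intact:

import os
import uuid

def rename_with_uuid(input_dir, file_name):
    # Split "model.pkl" into ("model", ".pkl"); extra dots in the stem survive.
    stem, ext = os.path.splitext(file_name)
    # First eight hex chars of a uuid4 give a short suffix, e.g. "model_9547643f.pkl".
    new_name = f"{stem}_{uuid.uuid4().hex[:8]}{ext}"
    os.rename(os.path.join(input_dir, file_name),
              os.path.join(input_dir, new_name))
    return new_name
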
@@ -67,14 +68,11 @@ class AddtoModelStore:
logger.info("File at " + source_blob_path + " is moved to " + dest_blob_path)
if copy_properties.status != "success":
dest_blob.abort_copy(copy_properties.id)
raise Exception(
f"Unable to copy blob %s with status %s"
% (source_blob_path, copy_properties.status)
)
logger.info("Unable to copy files from current to archived")
source_blob.delete_blob()
except Exception as e:
logger.info(traceback.format_exc())
raise Exception(e)
logger.info(e)
def check_meta_data(self):
return path.exists(self.query['metadata_file'])
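
The archive step above is a copy-then-delete "move": start a server-side copy, abort and fail if the copy does not report success, and delete the source only afterwards. A minimal sketch of that pattern with azure-storage-blob, assuming a hypothetical connection string and container name:

from azure.storage.blob import BlobServiceClient

def move_blob(conn_str, container, source_blob_path, dest_blob_path):
    service = BlobServiceClient.from_connection_string(conn_str)
    source = service.get_blob_client(container=container, blob=source_blob_path)
    dest = service.get_blob_client(container=container, blob=dest_blob_path)

    # Server-side copy; copies within one storage account usually finish synchronously.
    dest.start_copy_from_url(source.url)
    props = dest.get_blob_properties()
    if props.copy.status != "success":
        dest.abort_copy(props.copy.id)
        raise RuntimeError(f"Copy of {source_blob_path} ended with status {props.copy.status}")

    # Remove the original only once the archived copy is confirmed.
    source.delete_blob()
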
@@ -92,7 +90,7 @@ class AddtoModelStore:
return True
except Exception as e:
logger.info(traceback.format_exc())
raise Exception(e)
raise FileExistsError(e)
def update_meta_data(self, data):
try:
@@ -101,13 +99,11 @@ class AddtoModelStore:
if i['id'] == self.ilens_tag_hierarchy:
i['archived'].append(i['current_model'])
i['current_model'] = self.update_current_model
else:
pass
# print(data)
return data
except Exception as e:
logger.info(traceback.format_exc())
raise Exception(e)
raise KeyError(e)
def get_file_name_from_meta_data(self, data):
try:
@@ -117,19 +113,20 @@ class AddtoModelStore:
return i['current_model']['model_fl_name']
except Exception as e:
logger.info(traceback.format_exc())
raise Exception(e)
raise KeyError(e)
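
Both update_meta_data and get_file_name_from_meta_data walk the metadata entries and match on the ilens_tag_hierarchy id. A small sketch of the rotation update_meta_data performs, assuming the metadata file holds a list of entries shaped like the JSON at the end of this diff (an id, a current_model record, an archived list); the names are illustrative:

def rotate_current_model(entries, tag_hierarchy_id, new_model_record):
    for entry in entries:
        if entry['id'] == tag_hierarchy_id:
            # The outgoing record is appended to the archive,
            # then the new record becomes current_model.
            entry['archived'].append(entry['current_model'])
            entry['current_model'] = new_model_record
    return entries
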
def run(self):
try:
checK_meta_file = self.check_meta_data()
# print(checK_meta_file)
if checK_meta_file:
check_meta_file = self.check_meta_data()
# print(check_meta_file)
if check_meta_file:
logger.info("metadata json file is present.........")
try:
logger.info("Reading Json file")
with open(self.meta_data_file) as f:
data = json.load(f)
except:
except Exception as e:
logger.info(e)
logger.info("Failed reading Json File")
logger.info(traceback.format_exc())
old_model_name = self.get_file_name_from_meta_data(data)
@@ -149,9 +146,9 @@ class AddtoModelStore:
else:
logger.info("AddToModelStore component failed...............")
return True
except Exception as e:
except Exception as er:
logger.info(traceback.format_exc())
raise Exception(e)
logger.info(er)
if __name__ == '__main__':
@@ -159,6 +156,6 @@ if __name__ == '__main__':
obj = AddtoModelStore(config)
obj.run()
logger.info("Component ran successfully")
except:
except Exception as e:
logger.info("Model Object Component Failed")
logger.info(traceback.format_exc())
@@ -69,7 +69,7 @@ except Exception as e:
try:
ilens_tag_hierarchy = os.environ.get('ilens_tag_hierarchy')
except Exception as e:
raise Exception(e)
raise KeyError(e)
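
One caveat on the hunk above: os.environ.get() returns None rather than raising when the key is missing, so the try/except around it will not catch an unset variable. A sketch of an explicit check, using the same variable name:

import os

ilens_tag_hierarchy = os.environ.get('ilens_tag_hierarchy')
if ilens_tag_hierarchy is None:
    raise KeyError("ilens_tag_hierarchy environment variable is not set")
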
BASE_LOG_PATH = os.path.join(os.getcwd(), "logs")
if not os.path.exists(os.path.join(os.getcwd(), 'logs')):
@@ -81,9 +81,6 @@ ENABLE_LOGSTASH_LOG = os.environ.get("ENABLE_LOGSTASH_LOG", 'False').lower()
LOGSTASH_HOST = _config.get('SERVICE_CONFIG', {}).get('LOGSTASH_HOST')
LOGSTASH_PORT = str(_config.get('SERVICE_CONFIG', {}).get('LOGSTASH_PORT'))
# os.environ['azure_file_path'] = '/data/model/tested/test1.pkl'
# os.environ['local_file_path'] = r'E:\iLens-AI\azure-file-download\StandardScaler.pkl'
config = {
'pipeline_id': pipeline_id,
'run_id': run_id,
......
@@ -14,3 +14,4 @@ class ComponentExceptions:
INVALID_ARTIFACT_BASE_PATH_EXCEPTION = "Artifact base path value is missing"
INVALID_AZURE_FILE_NAME_EXCEPTION = "Artifact name is missing"
INVALID_CONTAINER_NAME = "Container name is missing"
FAILED_UPLOAD_TO_BLOB = "Failed while uploading to blob"
@@ -4,10 +4,10 @@
"current_model": {
"model_name": "randomforest",
"model_params": null,
"training_date": "2021-03-10",
"training_date": "2021-03-11",
"framework": null,
"serializedObjectType": "pkl",
"model_fl_name": "modele_a7d835e0.pkl"
"model_fl_name": "model_9547643f_59d89573.pkl"
},
"archived": [
{
@@ -16,7 +16,31 @@
"training_date": "2021-03-10",
"framework": null,
"serializedObjectType": "pkl",
"model_fl_name": "modele_85194781.pkl"
"model_fl_name": "model_111175b9.pkl"
},
{
"model_name": "randomforest",
"model_params": null,
"training_date": "2021-03-10",
"framework": null,
"serializedObjectType": "pkl",
"model_fl_name": "model_3f0d66a2.pkl"
},
{
"model_name": "randomforest",
"model_params": null,
"training_date": "2021-03-10",
"framework": null,
"serializedObjectType": "pkl",
"model_fl_name": "model_7f17bf54.pkl"
},
{
"model_name": "randomforest",
"model_params": null,
"training_date": "2021-03-10",
"framework": null,
"serializedObjectType": "pkl",
"model_fl_name": "model_07080eaa.pkl"
}
]
}
......
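
The metadata hunk above shows the effect of one promotion: current_model is rewritten and one more record lands in archived. A sketch of the read side that run() relies on, assuming a hypothetical metadata path and that each entry carries an id:

import json

def current_model_file(meta_path, tag_hierarchy_id):
    with open(meta_path) as f:
        entries = json.load(f)
    for entry in entries:
        if entry['id'] == tag_hierarchy_id:
            return entry['current_model']['model_fl_name']
    raise KeyError(f"No metadata entry for id {tag_hierarchy_id}")
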