Commit d36e3c75 authored by Akshay G's avatar Akshay G

Initial push

parent 7d55b4c4
# Default ignored files
/shelf/
/workspace.xml
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (scratch)" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/read_from_kairos_component.iml" filepath="$PROJECT_DIR$/.idea/read_from_kairos_component.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.7 (scratch)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
# Read_from_Kairos_Component
# Use the below script to convert .pt to onnx.
```python
# Import required libraries
import torch
import torchvision.models as models
# Use an existing model from Torchvision, note it
# will download this if not already on your computer (might take time)
model = create_cnn_model(models.resnet34, nc=5)
model.load_state_dict(torch.load("/content/best_resnet34.pth")["model"])
# Create some sample input in the shape this model expects
dummy_input = torch.randn(10, 3, 224, 224)
# # It's optional to label the input and output layers
# input_names = [ "actual_input_1" ] + [ "learned_%d" % i for i in range(16) ]
# output_names = [ "output1" ]
# Use the exporter from torch to convert to onnx
# model (that has the weights and net arch)
torch.onnx.export(model, dummy_input, "resnet34.onnx", verbose=True)
#---------------Service Configurations----------------#
SERVICE_CONFIG:
LOG_LEVEL: info
LOG_HANDLER_NAME: ReadFromKairos
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# pycharm
.idea/
# logs
logs/
\ No newline at end of file
2021-02-12 15:06:05,195 ERROR ReadFromKairos <class 'dict'>
NoneType: None
2021-02-12 15:06:05,195 ERROR ReadFromKairos {'metrics': [{'tags': {'c3': ['site_153$dept_1006$line_220$equipment_1258$tag_5790']}, 'name': 'ilens.live_data.raw'}], 'plugins': [], 'cache_time': 0, 'start_absolute': 1601490600000, 'end_absolute': 1606674600000}
NoneType: None
2021-02-12 15:18:19,044 INFO ReadFromKairos Parsing user requests
2021-02-12 15:18:19,046 INFO ReadFromKairos Querying Karios DB...
2021-02-12 15:18:19,121 INFO ReadFromKairos Receiving data...
2021-02-12 15:18:19,148 INFO ReadFromKairos Data successfully written to --> /mnt/data.csv
import requests
from pandas import DataFrame
from datetime import datetime
from scripts.common.config_parser import *
from scripts.common.logsetup import logger
from scripts.common.constants import KariosConstants, ComponentExceptions
def get_data(query):
    """Validate the component config, build a KairosDB query and run it.

    Fills in / strips keys of ``query`` (normally ``KariosConstants.QUERY``)
    based on the component ``config``, posts it to the KairosDB datapoints
    endpoint and returns the values of the first result.

    :param query: query template dict shaped like ``KariosConstants.QUERY``.
        NOTE(review): mutated in place — since the template is a shared
        class attribute, calling this twice in one process reuses the
        already-patched dict; confirm single-shot usage.
    :return: pandas ``DataFrame`` built from the ``values`` list of the
        first result of the first query in the response.
    :raises Exception: for any missing/invalid config entry or a non-200
        response from KairosDB.
    """
    # ``config``, ``json`` and ``os`` come in via the star import from
    # scripts.common.config_parser.
    logger.info("Parsing user requests")
    # The KairosDB base URL is mandatory.
    if KariosConstants.KAIROS_URL_KEY in config.keys():
        kairosdb_server = config[KariosConstants.KAIROS_URL_KEY]
    else:
        raise Exception(ComponentExceptions.INVALID_KAIROS_URL_EXCEPTION)
    # The tag hierarchy must be a non-empty value.
    if config[KariosConstants.TAG_HIERARCHY_KEY] is not None and len(config[KariosConstants.TAG_HIERARCHY_KEY]) > 0:
        pass
    else:
        raise Exception(ComponentExceptions.INVALID_TAG_HIERARCHY_EXCEPTION)
    if KariosConstants.METRIC_NAME_KEY in config.keys():
        query[KariosConstants.METRICS_KEY][0][KariosConstants.NAME_KEY] = config[KariosConstants.METRIC_NAME_KEY]
    else:
        raise Exception(ComponentExceptions.INVALID_METRIC_NAME_EXCEPTION)
    # Start time: an absolute "dd/mm/yyyy HH:MM:SS" timestamp (converted to
    # epoch milliseconds) takes precedence over a relative one.
    if KariosConstants.START_ABSOLUTE_KEY in config.keys():
        start_time = config[KariosConstants.START_ABSOLUTE_KEY]
        try:
            start_timestamp = datetime.strptime(start_time, KariosConstants.DATETIME_FORMAT)
            timestamp_start = datetime.timestamp(start_timestamp)
            query[KariosConstants.START_ABSOLUTE_KEY] = int(timestamp_start) * 1000
            # pop with default: don't raise if the key was already removed.
            query.pop(KariosConstants.START_RELATIVE_KEY, None)
        except ValueError as e:
            raise Exception(f"{ComponentExceptions.INVALID_START_TIME_FORMAT_EXCEPTION} - {str(e)}") from e
    elif KariosConstants.START_RELATIVE_KEY in config.keys():
        if config[KariosConstants.START_RELATIVE_KEY][KariosConstants.VALUE_KEY] > 0:
            query[KariosConstants.START_RELATIVE_KEY][KariosConstants.VALUE_KEY] = \
                config[KariosConstants.START_RELATIVE_KEY][KariosConstants.VALUE_KEY]
            query[KariosConstants.START_RELATIVE_KEY][KariosConstants.UNIT_KEY] = \
                config[KariosConstants.START_RELATIVE_KEY][KariosConstants.UNIT_KEY]
            query.pop(KariosConstants.START_ABSOLUTE_KEY, None)
        else:
            raise Exception(ComponentExceptions.INVALID_START_TIME_VALUE_EXCEPTION)
    else:
        raise Exception(ComponentExceptions.MISSING_START_TIME_VALUE_EXCEPTION)
    # End time: same precedence; when neither is given, drop both keys so
    # KairosDB queries up to "now".
    if KariosConstants.END_ABSOLUTE_KEY in config.keys():
        end_time = config[KariosConstants.END_ABSOLUTE_KEY]
        try:
            end_timestamp = datetime.strptime(end_time, KariosConstants.DATETIME_FORMAT)
            timestamp_end = datetime.timestamp(end_timestamp)
            query[KariosConstants.END_ABSOLUTE_KEY] = int(timestamp_end) * 1000
            query.pop(KariosConstants.END_RELATIVE_KEY, None)
        except ValueError as e:
            raise Exception(f"{ComponentExceptions.INVALID_END_TIME_FORMAT_EXCEPTION} - {str(e)}") from e
    elif KariosConstants.END_RELATIVE_KEY in config.keys():
        if config[KariosConstants.END_RELATIVE_KEY][KariosConstants.VALUE_KEY] > 0:
            query[KariosConstants.END_RELATIVE_KEY][KariosConstants.VALUE_KEY] = \
                config[KariosConstants.END_RELATIVE_KEY][KariosConstants.VALUE_KEY]
            query[KariosConstants.END_RELATIVE_KEY][KariosConstants.UNIT_KEY] = \
                config[KariosConstants.END_RELATIVE_KEY][KariosConstants.UNIT_KEY]
            query.pop(KariosConstants.END_ABSOLUTE_KEY, None)
        else:
            raise Exception(ComponentExceptions.INVALID_START_END_VALUE_EXCEPTION)
    else:
        query.pop(KariosConstants.END_ABSOLUTE_KEY, None)
        query.pop(KariosConstants.END_RELATIVE_KEY, None)
    # Optional sections are removed from the template when not configured.
    if KariosConstants.AGGREGATORS_KEY not in config.keys():
        query[KariosConstants.METRICS_KEY][0].pop(KariosConstants.AGGREGATORS_KEY, None)
    if KariosConstants.GROUPBY_KEY not in config.keys():
        query[KariosConstants.METRICS_KEY][0].pop(KariosConstants.GROUPBY_KEY, None)
    if KariosConstants.TAG_HIERARCHY_KEY in config.keys():
        query[KariosConstants.METRICS_KEY][0][KariosConstants.TAGS_KEY][KariosConstants.C3_KEY] = \
            [config[KariosConstants.TAG_HIERARCHY_KEY]]
    else:
        raise Exception(ComponentExceptions.MISSING_TAG_HIERARCHY_EXCEPTION)
    logger.info("Querying Karios DB...")
    response = requests.post(kairosdb_server + KariosConstants.KARIOS_API, data=json.dumps(query))
    if response.status_code == KariosConstants.REQUEST_SUCCESS_CODE:
        logger.info("Receiving data...")
        data = response.json()[KariosConstants.QUERIES_KEY][0][KariosConstants.RESULTS_KEY][0][
            KariosConstants.VALUES_KEY]
        df = DataFrame.from_dict(data)
        return df
    else:
        raise Exception(response.json()[KariosConstants.ERRORS_KEY])
if __name__ == '__main__':
    # Component entry point: fetch the configured KairosDB data and drop it
    # as a CSV on the shared volume for downstream components.
    output_path = os.path.join(config['shared_volume'], 'data.csv')
    frame = get_data(KariosConstants.QUERY)
    frame.to_csv(output_path)
    logger.info("Data successfully written to --> {}".format(output_path))
\ No newline at end of file
import os
import sys
import requests
import json
import pandas as pd
import time
import datetime
# Scratch/demo setup: inject the component configuration through the
# environment, the way the platform would when launching the container.
# A start date and a metric name are required by read_kairos().
os.environ['config'] = (
    '{"kairosdb_server": "http://192.168.0.207:8080","metric_name": "ilens.live_data.raw",'
    '"c3": "site_153$dept_1006$line_220$equipment_1258$tag_5790", "start_absolute": "01/10/2020 '
    '00:00:00","end_absolute": "30/11/2020 00:00:00","path": "/va"} '
)

# KairosDB query template. read_kairos() fills in the metric/tag/time fields
# from the config and strips the keys that are not used.
query = {
    "metrics": [
        {
            "tags": {},
            "name": None,
            "aggregators": None,
            "group_by": None
        }
    ],
    "plugins": [],
    "cache_time": 0,
    "start_absolute": None,
    "end_absolute": None,
    "start_relative": {
        "value": None,
        "unit": None
    },
    "end_relative": {
        "value": None,
        "unit": None
    }
}
def read_kairos(config):
    """Query KairosDB according to ``config`` and return the datapoints.

    Validates the configuration, patches the module-level ``query`` template
    in place (so the function is effectively single-shot per process), posts
    it to the KairosDB datapoints endpoint and returns the ``values`` of the
    first result as a pandas DataFrame.

    :param config: dict with at least ``kairosdb_server``, ``metric_name``
        and ``c3``, plus either absolute ("dd/mm/yyyy HH:MM:SS" strings) or
        relative start/end times.
    :return: pandas ``DataFrame`` built from the response values.
    :raises Exception: for invalid/missing configuration or a non-200
        response from KairosDB.
    """
    if "kairosdb_server" in config.keys():
        kairosdb_server = config['kairosdb_server']
    else:
        raise Exception("Kairos url should be required")
    # c3 is the tag-hierarchy filter; it must be a non-empty string.
    if config["c3"] is not None and len(config["c3"]) > 0:
        pass
    else:
        raise Exception("c3 values should not be null")
    if "metric_name" in config.keys():
        query["metrics"][0]["name"] = config["metric_name"]
    else:
        raise Exception("metric_name key is not there")
    # Start time: absolute (converted to epoch millis) wins over relative.
    if "start_absolute" in config.keys():
        start_time = config['start_absolute']
        try:
            start_timestamp = datetime.datetime.strptime(start_time, "%d/%m/%Y %H:%M:%S")
            timestamp_start = datetime.datetime.timestamp(start_timestamp)
            query["start_absolute"] = int(timestamp_start) * 1000
            # pop with default: don't raise if the key was already removed.
            query.pop("start_relative", None)
        except ValueError as e:
            # Narrowed from a bare except: only a bad date format maps here.
            raise Exception("start time should be dd/mm/yyyy hh/MM/SS") from e
    elif "start_relative" in config.keys():
        if config['start_relative']['value'] > 0:
            query['start_relative']['value'] = config['start_relative']['value']
            query['start_relative']['unit'] = config['start_relative']['unit']
            query.pop("start_absolute", None)
        else:
            raise Exception("start time should be greater than zero")
    else:
        raise Exception("need start date key")
    # End time: same precedence; when neither is given, drop both keys so
    # KairosDB queries up to "now".
    if "end_absolute" in config.keys():
        end_time = config['end_absolute']
        try:
            end_timestamp = datetime.datetime.strptime(end_time, "%d/%m/%Y %H:%M:%S")
            timestamp_end = datetime.datetime.timestamp(end_timestamp)
            query["end_absolute"] = int(timestamp_end) * 1000
            query.pop("end_relative", None)
        except ValueError as e:
            # Bug fix: the original message wrongly said "start time" here.
            raise Exception("end time should be dd/mm/yyyy hh/MM/SS") from e
    elif "end_relative" in config.keys():
        if config['end_relative']['value'] > 0:
            query['end_relative']['value'] = config['end_relative']['value']
            query['end_relative']['unit'] = config['end_relative']['unit']
            query.pop("end_absolute", None)
        else:
            raise Exception("end time should be greater than zero and less than start time")
    else:
        query.pop("end_absolute", None)
        query.pop("end_relative", None)
    # Optional sections are removed from the template when not configured.
    if 'aggregators' not in config.keys():
        query['metrics'][0].pop('aggregators', None)
    if 'group_by' not in config.keys():
        query['metrics'][0].pop('group_by', None)
    if "c3" in config.keys():
        query["metrics"][0]["tags"]["c3"] = [config["c3"]]
    else:
        raise Exception("required tag name c3")
    response = requests.post(kairosdb_server + "/api/v1/datapoints/query", data=json.dumps(query))
    if response.status_code == 200:
        data = response.json()["queries"][0]["results"][0]["values"]
        df = pd.DataFrame.from_dict(data)
        return df
    else:
        raise Exception(response.json()['errors'])
if __name__ == "__main__":
    # Component entry point: read config from the environment, fetch the
    # data and write it to /opt<config['path']>/out.csv.
    config = json.loads(os.environ.get('config'))
    if config is None:
        # NOTE(review): effectively unreachable — json.loads(None) raises
        # TypeError before this check when the env var is missing.
        sys.stderr.write("Configuration not found...")
        sys.stderr.write("Exiting....")
        sys.exit(1)
    main_path = "/opt"
    out_dir = main_path + config['path']
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    df = read_kairos(config)
    try:
        path = os.path.join(out_dir, "out.csv")
        df.to_csv(path)
    except OSError as e:
        # Narrowed from a bare except and chained so the real cause survives.
        raise Exception("Failed to create csv file") from e
#!/usr/bin/env python
import os
import sys
import json
import yaml
# Scratch/demo setup: push the job configuration into the environment the
# way the platform would for the container.
# NOTE(review): this unconditionally overwrites any externally supplied
# "config" env var — confirm this is intended outside local testing.
os.environ['config'] = (
    '{"kairosdb_url": "http://192.168.0.207:8080","metric_name": "ilens.live_data.raw",'
    '"tag_hierarchy": "site_153$dept_1006$line_220$equipment_1258$tag_5790", "start_absolute": '
    '"01/10/2020 '
    '00:00:00","end_absolute": "30/11/2020 00:00:00","shared_volume": "/mnt"}'
)

# Service-level settings (log level / handler name) come from a YAML file.
config_path = os.path.join(os.getcwd(), "conf", "configuration.yml")
if os.path.exists(config_path):
    sys.stderr.write("Reading config from --> {}".format(config_path))
    sys.stderr.write("\n")
    with open(config_path, 'r') as stream:
        _config = yaml.safe_load(stream)
else:
    sys.stderr.write("Configuration not found...")
    sys.stderr.write("Exiting....")
    sys.exit(1)

# Make sure the log directory exists before any handler opens a file there.
BASE_LOG_PATH = os.path.join(os.getcwd(), "logs")
if not os.path.exists(BASE_LOG_PATH):
    os.mkdir(BASE_LOG_PATH)

LOG_LEVEL = _config.get('SERVICE_CONFIG', {}).get("LOG_LEVEL", "INFO").upper()
LOG_HANDLER_NAME = _config.get('SERVICE_CONFIG', {}).get("LOG_HANDLER_NAME", "ReadFromKairos")

# Per-job runtime configuration arrives as JSON in the environment.
config = json.loads(os.environ.get('config'))
if not os.path.exists(config['shared_volume']):
    raise Exception("Shared path does not exist!")
#!/usr/bin/env python
class KariosConstants:
    """Config keys, response keys, endpoint details and the query template
    used by the read-from-Kairos component.
    """

    # Component configuration keys.
    KAIROS_URL_KEY = "kairosdb_url"
    METRIC_NAME_KEY = "metric_name"
    TAG_HIERARCHY_KEY = "tag_hierarchy"
    SHARED_VOLUME_KEY = "shared_volume"
    START_ABSOLUTE_KEY = "start_absolute"
    START_RELATIVE_KEY = "start_relative"
    END_ABSOLUTE_KEY = "end_absolute"
    END_RELATIVE_KEY = "end_relative"
    VALUE_KEY = "value"
    VALUES_KEY = "values"
    METRICS_KEY = "metrics"
    NAME_KEY = "name"
    UNIT_KEY = "unit"
    AGGREGATORS_KEY = "aggregators"
    GROUPBY_KEY = "group_by"
    TAGS_KEY = "tags"
    C3_KEY = "c3"
    # KairosDB response keys.
    QUERIES_KEY = "queries"
    RESULTS_KEY = "results"
    ERRORS_KEY = "errors"
    # Request details.
    DATETIME_FORMAT = "%d/%m/%Y %H:%M:%S"
    KARIOS_API = "/api/v1/datapoints/query"
    REQUEST_SUCCESS_CODE = 200
    PLUGINS_KEY = "plugins"
    CACHE_TIME_KEY = "cache_time"
    # Query template. NOTE(review): this is a mutable class attribute and
    # get_data() patches it in place — it is shared across all users of the
    # class within a process.
    QUERY = {
        METRICS_KEY: [
            {
                TAGS_KEY: {},
                NAME_KEY: None,
                AGGREGATORS_KEY: None,
                GROUPBY_KEY: None,
            }
        ],
        PLUGINS_KEY: [],
        CACHE_TIME_KEY: 0,
        START_ABSOLUTE_KEY: None,
        END_ABSOLUTE_KEY: None,
        START_RELATIVE_KEY: {VALUE_KEY: None, UNIT_KEY: None},
        END_RELATIVE_KEY: {VALUE_KEY: None, UNIT_KEY: None},
    }
class ComponentExceptions:
    """User-facing error messages raised by the component's validation."""

    INVALID_KAIROS_URL_EXCEPTION = "Kairos url should be required"
    INVALID_TAG_HIERARCHY_EXCEPTION = "Tag Hierarchy values should not be null"
    INVALID_METRIC_NAME_EXCEPTION = "metric_name key is not there"
    INVALID_START_TIME_FORMAT_EXCEPTION = "start time should be dd/mm/yyyy hh/MM/SS"
    INVALID_END_TIME_FORMAT_EXCEPTION = "end time should be dd/mm/yyyy hh/MM/SS"
    INVALID_START_TIME_VALUE_EXCEPTION = "start time should be greater than zero"
    MISSING_START_TIME_VALUE_EXCEPTION = "need start date key"
    MISSING_TAG_HIERARCHY_EXCEPTION = "required tag hierarchy"
    INVALID_START_END_VALUE_EXCEPTION = "end time should be greater than zero and less than start time"
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import logging
from logging.handlers import RotatingFileHandler
from scripts.common.config_parser import LOG_LEVEL, LOG_HANDLER_NAME, BASE_LOG_PATH
# Log-record layouts: the DEBUG variant adds thread/file/function/line info.
DEFAULT_FORMAT = '%(asctime)s %(levelname)5s %(name)s %(message)s'
DEBUG_FORMAT = ('%(asctime)s %(levelname)5s %(name)s [%(threadName)5s:%(filename)5s:%(funcName)5s():%(lineno)s] %('
                'message)s ')
EXTRA = {}

# Use the verbose layout only when the service is configured at DEBUG level.
FORMATTER = DEBUG_FORMAT if LOG_LEVEL.strip() == "DEBUG" else DEFAULT_FORMAT

# Register a custom TRACE level just below DEBUG and stash its numeric value
# on the logging module for ILensLogger.trace to use.
logging.trace = logging.DEBUG - 5
logging.addLevelName(logging.DEBUG - 5, 'TRACE')
class ILensLogger(logging.getLoggerClass()):
    """Logger subclass that adds a ``trace()`` method for the custom TRACE
    level registered at module load (``logging.trace``)."""

    def __init__(self, name):
        super().__init__(name)

    def trace(self, msg, *args, **kwargs):
        # Emit only when the TRACE level is enabled for this logger.
        trace_level = logging.trace
        if self.isEnabledFor(trace_level):
            self._log(trace_level, msg, args, **kwargs)
def get_logger(log_handler_name):
    """
    Purpose : To create logger .

    Builds (or returns) the named logger with a console handler and a
    size-rotating file handler under BASE_LOG_PATH.

    :param log_handler_name: Name of the log handler.
    :return: logger object.
    """
    log_path = os.path.join(BASE_LOG_PATH, log_handler_name + ".log")
    logging.setLoggerClass(ILensLogger)
    _logger = logging.getLogger(log_handler_name)
    _logger.setLevel(LOG_LEVEL.strip().upper())
    # Bug fix: the original attached fresh handlers on every call, so
    # repeated get_logger() calls duplicated every log line. Attach only
    # when the logger has none yet.
    if not _logger.handlers:
        formatter = logging.Formatter(FORMATTER)
        stream_handler = logging.StreamHandler()
        stream_handler.setLevel(LOG_LEVEL)
        stream_handler.setFormatter(formatter)
        # Rotate at 10 MiB, keep up to 5 backups.
        file_handler = RotatingFileHandler(log_path, maxBytes=10485760,
                                           backupCount=5)
        file_handler.setFormatter(formatter)
        _logger.addHandler(stream_handler)
        _logger.addHandler(file_handler)
    return _logger


logger = get_logger(LOG_HANDLER_NAME)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment