aakash.bedi / dalmia_degradation_calculation · Commits

Commit c6fb4cc7, authored Feb 28, 2023 by aakash.bedi
Message: updated loggers
Parent: 646a534f
Pipeline #59480: canceled with stage

Showing 4 changed files with 16 additions and 8 deletions:
- app.py (+4, -5)
- scripts/core/engine/inv_and_mppt_level.py (+5, -0)
- scripts/core/engine/raw_predicted_tags.py (+4, -0)
- scripts/core/engine/tags_data.py (+3, -3)
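The additions are mostly f-string log lines that record dataframe shapes and connection details at each stage of the pipeline. The project's own logger utility is not part of this diff; the following is only a minimal sketch of a module-level logger that would support these calls, assuming the standard-library logging module (the real project may configure logging differently).

```python
# Hypothetical stand-in for the project's logger utility (not shown in this commit).
# Assumes the standard-library logging module.
import logging

logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
)
logger = logging.getLogger("dalmia_degradation_calculation")

# The new log lines in this commit follow the same f-string pattern, e.g.:
# logger.info(f'raw tags dataframe shape - {df_raw_tags.shape}')
```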
app.py (+4, -5)

```diff
@@ -24,20 +24,19 @@ start_date, end_date, start_timestamp, end_timestamp = KairosStartEndDate().star
 def get_tag_details():
     try:
         df_raw_tags, df_predicted_tags = get_raw_predicted_tags()
         logger.info(f'raw tags dataframe shape - {df_raw_tags.shape}')
         logger.info(f'predicted tags dataframe shape - {df_predicted_tags.shape}')
-        df = get_tags_data(mppt_tags=df_raw_tags, start_timestamp=start_timestamp, end_timestamp=end_timestamp)
+        df = get_tags_data(tags=df_raw_tags, start_timestamp=start_timestamp, end_timestamp=end_timestamp)
         logger.info(f'Shape of final df - {df.shape}')
         mppt_data = GetData()
         df_mppt = mppt_data.current_voltage_mppt_data(df=df)
         data_preprocessing = DataPreprocessing()
         df_mppt = data_preprocessing.remove_outliers(df=df_mppt, param_list=['tilt_irradiance', 'voltage_mppt', 'current_mppt'])
         df_mppt, df_train, df_test = data_preprocessing.train_test_split(df=df_mppt)
         get_training_inference = TrainingInference(df=df_mppt, df_train=df_train, df_test=df_test)
         ai_modelling(df_train=df_train, get_training_inference=get_training_inference,
 ...
```
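DataPreprocessing.remove_outliers and train_test_split are called here on the listed irradiance, voltage, and current columns, but their implementations are not part of this commit. The sketch below is purely an assumption about what an IQR-based remove_outliers with that df/param_list interface could look like; the real class may use a different rule entirely.

```python
# Hypothetical sketch only: the real DataPreprocessing.remove_outliers is not shown
# in this diff and may filter outliers differently.
import pandas as pd

def remove_outliers(df: pd.DataFrame, param_list: list[str]) -> pd.DataFrame:
    """Drop rows falling outside 1.5 * IQR on any of the given columns."""
    mask = pd.Series(True, index=df.index)
    for col in param_list:
        q1, q3 = df[col].quantile([0.25, 0.75])
        iqr = q3 - q1
        mask &= df[col].between(q1 - 1.5 * iqr, q3 + 1.5 * iqr)
    return df[mask].reset_index(drop=True)
```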
scripts/core/engine/inv_and_mppt_level.py (+5, -0)

```diff
@@ -20,6 +20,8 @@ class TrainingInference:
         x_train = df_train_mppt[['datetime', 'inv_id', 'mppt_id', 'hour', 'tilt_irradiance', 'voltage_mppt']]
         y_train = df_train_mppt[['current_mppt']]
+        logger.debug(f'shape of x_train for {inv_id} & {mppt_id} - {x_train.shape}')
+        logger.debug(f'shape of y_train for {inv_id} & {mppt_id} - {y_train.shape}')
         x_train_std, scaler_x = data_preprocessing.get_standardized_data(df=x_train, param_list=['datetime', 'inv_id', 'mppt_id'])
 ...
@@ -42,6 +44,9 @@ class TrainingInference:
         df_test_mppt.reset_index(drop=True, inplace=True)
         x_test = df_test_mppt[['datetime', 'inv_id', 'mppt_id', 'hour', 'tilt_irradiance', 'voltage_mppt']]
         y_test = df_test_mppt[['current_mppt']]
+        logger.debug(f'shape of x_test for {inv_id} & {mppt_id} - {x_test.shape}')
+        logger.debug(f'shape of y_test for {inv_id} & {mppt_id} - {y_test.shape}')
         data_preprocessing = DataPreprocessing()
         x_test_std = data_preprocessing.get_transform_std_data(df=x_test, param_list=['datetime', 'inv_id', 'mppt_id'],
 ...
```
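In these hunks, get_standardized_data fits a scaler on the training features and returns it (scaler_x), while get_transform_std_data reuses a previously fitted scaler on the test features. Neither implementation is in this diff, and the trailing argument of the get_transform_std_data call is cut off in the view, so the scaler parameter below is an assumption. The sketch only illustrates the fit-on-train / transform-on-test pattern, assuming scikit-learn's StandardScaler and treating param_list as the id/timestamp columns to leave unscaled.

```python
# Sketch of the standardize-train / transform-test split implied by the calls above;
# the project's DataPreprocessing class is not part of this commit.
import pandas as pd
from sklearn.preprocessing import StandardScaler

def get_standardized_data(df: pd.DataFrame, param_list: list[str]):
    """Fit a scaler on every column except those in param_list (ids/timestamps)."""
    features = df.drop(columns=param_list)
    scaler = StandardScaler().fit(features)
    scaled = pd.DataFrame(scaler.transform(features), columns=features.columns, index=df.index)
    return pd.concat([df[param_list], scaled], axis=1), scaler

def get_transform_std_data(df: pd.DataFrame, param_list: list[str], scaler: StandardScaler):
    """Reuse the training-time scaler so test data is scaled with the same statistics."""
    features = df.drop(columns=param_list)
    scaled = pd.DataFrame(scaler.transform(features), columns=features.columns, index=df.index)
    return pd.concat([df[param_list], scaled], axis=1)
```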
scripts/core/engine/raw_predicted_tags.py (+4, -0)

```diff
@@ -11,15 +11,19 @@ def get_raw_predicted_tags():
     try:
         mongo_conn = MongoConnect(uri=Mongo.mongo_uri, database=MongoConstants.db, collection=MongoConstants.collection)
+        logger.debug(f'mongo conn - {mongo_conn}')
         raw_tags_dict = mongo_conn.find_one({"$and": [{"id": "dalmia_string_level_tags"}, {"city": "ariyalur"}, {"tags_property": "raw"}]})
         req_tags = raw_tags_dict['input_data']
+        logger.info(f'raw tags dict - {req_tags}')
         df_raw_tags = pd.DataFrame.from_dict(req_tags, orient='index')
         predicted_tags_dict = mongo_conn.find_one({"$and": [{"id": "dalmia_string_level_tags"}, {"city": "ariyalur"}, {"tags_property": "predicted"}]})
         predicted_tags = predicted_tags_dict['input_data']
+        logger.info(f'predicted tags dict - {predicted_tags}')
         df_predicted_tags = pd.DataFrame.from_dict(predicted_tags, orient='index')
         df_raw_tags.reset_index(inplace=True)
         df_raw_tags.rename(columns={'index': 'tag_name'}, inplace=True)
         df_predicted_tags.reset_index(inplace=True)
 ...
```
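MongoConnect.find_one wraps a MongoDB lookup; the tag definitions live in a document whose input_data field appears to be keyed by tag name, which is why orient='index' followed by reset_index/rename yields a tag_name column. A minimal sketch of the same query and DataFrame conversion using pymongo directly is shown below; the connection string, database, and collection names are placeholders, since the project's Mongo/MongoConstants configuration is not in this diff.

```python
# Sketch using pymongo directly; the project's MongoConnect wrapper and its
# configuration constants are not part of this commit.
import pandas as pd
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")   # placeholder URI
collection = client["some_db"]["some_collection"]   # placeholder names

raw_tags_doc = collection.find_one({"$and": [
    {"id": "dalmia_string_level_tags"},
    {"city": "ariyalur"},
    {"tags_property": "raw"},
]})

# 'input_data' is assumed to map tag names to per-tag fields, so orient='index'
# turns each tag name into a row; reset_index/rename then exposes it as 'tag_name'.
df_raw_tags = pd.DataFrame.from_dict(raw_tags_doc["input_data"], orient="index")
df_raw_tags.reset_index(inplace=True)
df_raw_tags.rename(columns={"index": "tag_name"}, inplace=True)
```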
scripts/core/engine/tags_data.py (+3, -3)

```diff
@@ -6,12 +6,12 @@ from scripts.utils.reading_tags import GetTags
 base_path = 'data_folder'
-def get_tags_data(mppt_tags, start_timestamp, end_timestamp):
+def get_tags_data(tags, start_timestamp, end_timestamp):
     try:
         get_tags = GetTags(base_path=base_path)
         df_merged = pd.DataFrame()
-        for inv_id in list(mppt_tags['inv_id'].unique()):
-            df_tags_id = get_tags.get_tags_id(df=mppt_tags, inv_id=inv_id)
+        for inv_id in list(tags['inv_id'].unique()):
+            df_tags_id = get_tags.get_tags_id(df=tags, inv_id=inv_id)
             tags_dict = df_tags_id[['tag_id', 'parameter_name']].set_index('tag_id').T.to_dict(orient="records")[0]
             tags_dict['site_107$dept_140$line_371$equipment_4115$tag_15828'] = 'tilt_irradiance'
             df_data = KairosQuery(start_timestamp=start_timestamp,
 ...
```
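The set_index('tag_id').T.to_dict(orient="records")[0] chain turns the two-column frame into a {tag_id: parameter_name} lookup, and the mppt_tags to tags rename in this signature is why the call site in app.py is updated in the same commit. A small self-contained illustration of the mapping idiom, using made-up tag ids (the real ids come from Mongo):

```python
# Illustration only, with hypothetical tag ids.
import pandas as pd

df_tags_id = pd.DataFrame({
    "tag_id": ["tag_1", "tag_2"],
    "parameter_name": ["voltage_mppt", "current_mppt"],
})

# Two-column frame -> {tag_id: parameter_name} lookup, as done in get_tags_data().
tags_dict = df_tags_id[["tag_id", "parameter_name"]].set_index("tag_id").T.to_dict(orient="records")[0]
print(tags_dict)  # {'tag_1': 'voltage_mppt', 'tag_2': 'current_mppt'}
```

Because the keyword changed, any caller still passing get_tags_data(mppt_tags=...) would now raise TypeError: got an unexpected keyword argument 'mppt_tags', so the signature and its callers have to move together.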