unify_api2 · Commit de51a7ff

Authored May 29, 2023 by ZZH

remove es 2023-5-29

Parent: 220a3fe5
Showing 11 changed files with 124 additions and 706 deletions (+124 -706)
Changed files:

  unify_api/modules/alarm_manager/dao/list_static_dao.py            +0   -176
  ...api/modules/alarm_manager/service/alarm_static_service.py      +3   -67
  unify_api/modules/alarm_manager/views/alarm_static.py             +3   -3
  unify_api/modules/common/procedures/points.py                     +7   -24
  unify_api/modules/electric/service/electric_service.py            +2   -2
  unify_api/modules/electric/views/electric.py                      +2   -2
  unify_api/modules/home_page/procedures/count_info_pds.py          +87  -352
  unify_api/modules/home_page/service/count_info_service.py         +7   -8
  unify_api/modules/home_page/views/count_info.py                   +4   -4
  unify_api/modules/home_page/views/count_info_proxy.py             +1   -2
  ...pi/modules/shidianu/procedures/analysis_result_service.py      +8   -66
unify_api/modules/alarm_manager/dao/list_static_dao.py

@@ -264,57 +264,6 @@ async def sdu_alarm_importance_dao_new15(start, end, points, is_sdu=None):
async def sdu_alarm_behavior_dao(start, end, points):
    """Electricity-use behaviour statistics; currently only illegal appliances.
    If more statistics are added, group by type first, then by appliance.
    """
    start_es = convert_es_str(start)
    end_es = convert_es_str(end)
    query_body = {
        "size": 0,
        "query": {
            "bool": {
                "filter": [
                    {"terms": {"point_id": points}},
                    {"range": {"datetime": {"gte": start_es, "lte": end_es}}},
                    {"terms": {"type.keyword": ["illegal_ele_app"]}}
                ]
            }
        },
        "aggs": {
            "appliance": {
                "terms": {"field": "appliance.keyword", "size": 100}
            }
        }
    }
    log.info("alarm_behavior query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    return es_result["aggregations"]["appliance"]["buckets"]


async def sdu_alarm_behavior_dao_new15(start, end, points):
    sql = f"SELECT appliance, count(1) doc_count " \
          f"FROM `point_1min_event` WHERE pid in %s " \
          f"and event_type = 'illegal_ele_app' and " \
...

@@ -324,44 +273,6 @@ async def sdu_alarm_behavior_dao_new15(start, end, points):
    return datas


async def sdu_alarm_aggs_date(cid):
    """SDU safe-running statistics.
    Aggregate by day, then by points.
    """
    query_body = {
        "size": 0,
        "query": {
            "bool": {
                "must": [
                    {"term": {"cid": cid}},
                    {"terms": {"type.keyword": SDU_ONE_TWO_GRADE_ALARM}}
                ]
            }
        },
        "aggs": {
            "date_day": {
                "date_histogram": {
                    "field": "datetime",
                    "interval": "day",
                    "time_zone": "+08:00",
                    "format": "yyyy-MM-dd",
                    "min_doc_count": 0
                }
            }
        }
    }
    log.info("query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    return es_result["aggregations"]["date_day"]["buckets"]


async def zdu_alarm_aggs_date_impotent(cid, start, end):
    """ZDU safe-running statistics.
    Aggregate by day, then by alarm level.
...

@@ -383,48 +294,6 @@ async def zdu_alarm_aggs_date_impotent(cid, start, end):
    return len(datas) if datas else 0


async def sdu_alarm_aggs_date_importance(cid):
    # todo: dust-monitoring ES still to be migrated
    """Aggregate by date, then by importance level."""
    query_body = {
        "size": 0,
        "query": {
            "bool": {
                "must": [
                    {"term": {"cid": cid}}
                ]
            }
        },
        "aggs": {
            "date_day": {
                "date_histogram": {
                    "field": "datetime",
                    "interval": "day",
                    "time_zone": "+08:00",
                    "format": "yyyy-MM-dd",
                    "min_doc_count": 0
                },
                "aggs": {
                    "importance": {
                        "terms": {"field": "importance", "size": 10}
                    }
                }
            }
        }
    }
    log.info("query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    return es_result["aggregations"]["date_day"]["buckets"]


async def sdu_alarm_aggs_type(cid, start, end):
    sql = f"""
        SELECT
...

@@ -444,51 +313,6 @@ async def sdu_alarm_aggs_type(cid, start, end):
    return datas if datas else []


async def sdu_alarm_aggs_type_old(cid, start, end):
    start_dt = datetime.strptime(start, "%Y-%m-%d %H:%M:%S")
    end_dt = datetime.strptime(end, "%Y-%m-%d %H:%M:%S")
    es_start_str = datetime(year=start_dt.year, month=start_dt.month,
                            day=start_dt.day).strftime("%Y-%m-%dT%H:%M:%S+08:00")
    es_end_str = end_dt.strftime("%Y-%m-%dT%H:%M:%S+08:00")
    """Aggregate by type."""
    query_body = {
        "size": 0,
        "query": {
            "bool": {
                "must": [
                    {"term": {"cid": cid}},
                    {"range": {"datetime": {"gte": es_start_str,
                                            "lte": es_end_str}}}
                ]
            }
        },
        "aggs": {
            "type": {
                "terms": {"field": "type.keyword", "size": 40}
            }
        }
    }
    log.info("query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    return es_result["aggregations"]["type"]["buckets"]


async def alarm_aggs_importance(cid, start, end):
    """Aggregate by alarm importance level."""
    sql = f"""
...

unify_api/modules/alarm_manager/service/alarm_static_service.py

@@ -5,10 +5,10 @@ from unify_api.modules.alarm_manager.components.alarm_static_cps import \
    ZsResp, TimeCount, ZasResp
from unify_api.modules.alarm_manager.dao.list_static_dao import \
    sdu_alarm_statistics_dao, sdu_alarm_type_dao, sdu_alarm_importance_dao, \
    sdu_alarm_behavior_dao, sdu_alarm_limit_type_dao, \
    sdu_alarm_limit_type_dao, \
    zdu_alarm_aggs_date_impotent, sdu_alarm_type_dao_new15, \
    sdu_alarm_importance_dao_new15, sdu_alarm_statistics_dao_new15, \
    sdu_alarm_behavior_dao_new15, sdu_alarm_limit_type_dao_new15
    sdu_alarm_behavior_dao, sdu_alarm_limit_type_dao_new15
from unify_api.modules.alarm_manager.procedures.alarm_static_pds import \
    new_alarm_content_info, risk_distribution, zdu_summary_info, \
    new_alarm_content_info_new15, risk_distribution_new15, \
...

@@ -294,79 +294,15 @@ async def sdu_electric_behave_service(cid, start, end, storeys, product):
    return SebResp(return_data=return_list)


async def sdu_index_alarm_ranking_service(cid, start, end, product):
async def sdu_index_alarm_rank(cid, start, end, product):
    """Home page - alarm/violation ranking - new SDU version, last 30 days"""
    points = await points_by_cid([cid])
    if not points:
        raise ParamException(message=f"{cid}没有points")
    point_list = [i["pid"] for i in points]
    points_map = {i["pid"]: i["name"] for i in points}
    # 1. ranking of illegal appliances
    behavior_res = await sdu_alarm_behavior_dao(start, end, point_list)
    behavior_illegal_app = []
    if behavior_res:
        for i in behavior_res:
            tmp_dic = {"name": i["key"], "value": i["doc_count"]}
            behavior_illegal_app.append(tmp_dic)
        behavior_illegal_app = sorted(behavior_illegal_app,
                                      key=lambda x: x["value"], reverse=True)
    # 2. alarm ranking, violation behaviour
    es_type_res = await sdu_alarm_limit_type_dao(start, end, point_list)
    alarm_ranking = []
    illegal_behavior = []
    for buck in es_type_res:
        point_name = points_map.get(buck["key"])
        # specific alarm levels
        # power_quality_low: poor power quality -- level III alarm
        # ele_overload: circuit overload -- level II alarm
        # illegal_ele_app: illegal appliance connected -- level I alarm
        im1, im2, im3 = 0, 0, 0
        if buck.get("type") and buck.get("type").get("buckets"):
            for im in buck["type"]["buckets"]:
                im3 = im["doc_count"] \
                    if im["key"] == "power_quality_low" else im3
                im2 = im["doc_count"] \
                    if im["key"] == "ele_overload" else im2
                im1 = im["doc_count"] \
                    if im["key"] == "illegal_ele_app" else im1
        # alarm ranking
        alarm_dic = {"name": point_name, "value": buck["doc_count"],
                     "im1": im1, "im2": im2, "im3": im3}
        alarm_ranking.append(alarm_dic)
        # violation behaviour
        illegal_count = 0
        type_buck = buck["type"]["buckets"]
        for i in type_buck:
            if i.get("key") == "illegal_ele_app":
                illegal_count = i.get("doc_count")
        illegal_dic = {"name": point_name, "value": illegal_count}
        illegal_behavior.append(illegal_dic)
    # 3. sort
    if len(alarm_ranking) > 1:
        alarm_ranking = sorted(alarm_ranking, key=lambda x: x["value"],
                               reverse=True)
    if len(illegal_behavior) > 1:
        illegal_behavior = sorted(illegal_behavior, key=lambda x: x["value"],
                                  reverse=True)
    return SiarResp(illegal_app=behavior_illegal_app[:5],
                    illegal_behavior=illegal_behavior[:5],
                    alarm_ranking=alarm_ranking[:5])


async def sdu_index_alarm_ranking_service_new15(cid, start, end, product):
    points = await points_by_cid([cid])
    if not points:
        raise ParamException(message=f"{cid}没有points")
    point_list = [i["pid"] for i in points]
    points_map = {i["pid"]: i["name"] for i in points}
    # 1. ranking of illegal appliances
    behavior_res = await sdu_alarm_behavior_dao_new15(start, end, point_list)
    behavior_illegal_app = []
    if behavior_res:
        for i in behavior_res:
            tmp_dic = {"name": i["appliance"], "value": i["doc_count"]}
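Note the result-shape change the service absorbs here: the ES aggregation returned buckets such as {"key": "<appliance>", "doc_count": n}, while the SQL DAO returns rows such as {"appliance": "<appliance>", "doc_count": n}, which is why this loop reads i["appliance"] instead of i["key"]. A small sketch of that adaptation (the function name is illustrative, not from the commit):

def to_ranking(rows, name_field="appliance"):
    # normalise DAO rows or ES buckets into the {"name", "value"} dicts used by SiarResp
    ranking = [{"name": r[name_field], "value": r["doc_count"]} for r in rows]
    return sorted(ranking, key=lambda x: x["value"], reverse=True)[:5]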
...

unify_api/modules/alarm_manager/views/alarm_static.py

@@ -9,11 +9,11 @@ from unify_api.modules.alarm_manager.procedures.alarm_static_pds import \
from unify_api.modules.alarm_manager.service.alarm_static_service import \
    sdu_alarm_statistics_service, sdu_alarm_statistics_sort_service, \
    sdu_app_statistics_sort_service, sdu_electric_behave_service, \
    sdu_index_alarm_ranking_service, zdu_level_distribution_service, \
    zdu_level_distribution_service, \
    zdu_content_distribution_service, zdu_summary_service, \
    zdu_alarm_sort_service_2, sdu_alarm_statistics_sort_service_new15, \
    sdu_app_statistics_sort_service_new15, \
    sdu_index_alarm_ranking_service_new15
    sdu_index_alarm_rank
from unify_api.modules.home_page.components.security_info_cps import \
    SecurityCountResp, AlarmContentDistributionResp
from unify_api.modules.home_page.procedures.count_info_pds import \
...

@@ -177,7 +177,7 @@ async def post_sdu_index_alarm_ranking(req, body: SiasReq) -> SiarResp:
    product = body.product
    # last 30 days, not including today
    start, end = last30_day_range()
    return await sdu_index_alarm_ranking_service_new15(cid, start, end, product)
    return await sdu_index_alarm_rank(cid, start, end, product)


@summary("报警统计-报警等级-智电u")
...

unify_api/modules/common/procedures/points.py

@@ -21,26 +21,26 @@ async def get_points(company_ids):
    company_point_ids_map = defaultdict(list)
    for point in points:
        company_point_ids_map[point["cid"]].append(point["pid"])
    point_map = {i["pid"]: i for i in points}
    point_ids = list(point_map.keys())
    pid_field, start_time_field = "pid", "start_time"
    sql = f"SELECT pid, mid FROM change_meter_record WHERE pid in %s ORDER BY {pid_field}, {start_time_field}"
    records = await conn.fetchall(sql, args=(point_ids,))
    newest_point_meter_relation = {i["pid"]: i["mid"] for i in records if
                                   i["mid"]}
    valid_mids = list(newest_point_meter_relation.values())
    newest_record_map = {i["pid"]: point_map.get(i["pid"]) for i in records
                         if i["mid"]}
    # look up meter parameters for the valid meter ids
    async with MysqlUtil() as conn:
        mid_field, start_time_field = "mid", "start_time"
        mp_sql = f"SELECT vc, mid, ctnum FROM meter_param_record WHERE mid in %s ORDER BY {mid_field}, {start_time_field}"
        mps = await conn.fetchall(mp_sql, args=(valid_mids,))
    meter_param_map = {i["mid"]: i for i in mps}
    for cid, point_ids in company_point_ids_map.items():
        for point_id in point_ids:
            if point_id in newest_record_map:
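The comprehension above relies on ordering: because the change_meter_record rows come back ORDER BY pid, start_time, later (newer) rows overwrite earlier ones in the dict, so newest_point_meter_relation ends up holding the most recent mid per pid. A standalone sketch of that idiom with illustrative values:

records = [
    {"pid": 1, "mid": 10}, {"pid": 1, "mid": 11},  # ordered by start_time ascending
    {"pid": 2, "mid": 20},
]
newest = {r["pid"]: r["mid"] for r in records if r["mid"]}
# newest == {1: 11, 2: 20} -- the last row per pid wins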
...

@@ -85,23 +85,6 @@ async def proxy_points(cid_list):
async def get_meter_by_point(point_id):
    """
    Get sid by point_id
    :param point_id:
    :return: sid
    """
    async with MysqlUtil() as conn:
        sql = "SELECT mid from change_meter_record where pid = %s order by start_time desc limit 1"
        point_meter_info = await conn.fetchone(sql, args=(point_id,))
        if not point_meter_info:
            return None
        newest_mid = point_meter_info["mid"]
        meter_sql = "SELECT sid, meter_no from meter where mid = %s"
        meter_info = await conn.fetchone(meter_sql, args=(newest_mid,))
        return meter_info


async def get_meter_by_point_new15(point_id):
    """
    Get device data for a point
    """
...

@@ -122,7 +105,7 @@ async def list_point(cid):
    for res in result:
        pid = res.get("pid")
        points[pid] = res
    sql = "SELECT id, `group`, item FROM location WHERE cid=%s and `type` in %s"
    async with MysqlUtil() as conn:
        result = await conn.fetchall(sql, args=(
...

@@ -132,7 +115,7 @@ async def list_point(cid):
        group = res.get("group")
        item = res.get("item")
        groups.setdefault(group, []).append((id, item))
    for pid, point_info in points.items():
        name = point_info.get("name")
        add_to_company = point_info["add_to_company"]
...

@@ -146,7 +129,7 @@ async def list_point(cid):
        comm_point = {"name": name, "point_id": pid, "locations": locations,
                      "add_to_company": add_to_company}
        list_point.append(comm_point)
    async with MysqlUtil() as conn:
        sql = "SELECT inlid, `name` FROM inline WHERE cid=%s"
        inlines = await conn.fetchall(sql, args=(cid,))
...

unify_api/modules/electric/service/electric_service.py

@@ -5,7 +5,7 @@ from pot_libs.utils.exc_util import BusinessException
from unify_api.constants import POINT_LEVEL_MAP, U_THRESHOLD, COSTTL_THRESHOLD, \
    LF_THRESHOLD, THDU_THRESHOLD, BL_THRESHOLD, THDI_THRESHOLD
from unify_api.modules.common.procedures.points import points_by_storeys, \
    get_meter_by_point_new15
    get_meter_by_point
from unify_api.modules.common.service.td_engine_service import \
    get_td_engine_data
from unify_api.modules.electric.dao.electric_dao import \
...

@@ -899,7 +899,7 @@ async def elec_index_service_new15(cid, point_id, start, end):
async def elec_current_service_new15(point_id):
    # get mtid
    meter_info = await get_meter_by_point_new15(point_id)
    meter_info = await get_meter_by_point(point_id)
    if not meter_info:
        raise BusinessException(
            message="没有该监测点的monitor信息,请联系运维人员!")
...

unify_api/modules/electric/views/electric.py

@@ -9,7 +9,7 @@ import re
from pot_libs.settings import SETTING
from datetime import datetime
from unify_api.modules.common.procedures.points import get_meter_by_point_new15
from unify_api.modules.common.procedures.points import get_meter_by_point
from unify_api.modules.common.service.td_engine_service import \
    get_td_engine_data
...

@@ -873,7 +873,7 @@ async def post_qual_current(req, body: PageRequest) -> QualCurrentResponse:
        raise ParamException(
            message="param exception, equals is NULL, no point_id")
    # get mtid
    meter_info = await get_meter_by_point_new15(point_id)
    meter_info = await get_meter_by_point(point_id)
    if not meter_info:
        raise BusinessException(
            message="没有该监测点的monitor信息,请联系运维人员!")
...

unify_api/modules/home_page/procedures/count_info_pds.py

@@ -58,7 +58,7 @@ async def other_info(company_id):
        GROUP BY
            DATE(pevent.event_datetime)
    """
    now_time = datetime.now()
    # get the factory installation time (create_time)
    async with MysqlUtil() as conn:
...

@@ -67,7 +67,7 @@ async def other_info(company_id):
        company = await conn.fetchone(company_sql, (company_id,))
    create_time_timestamp = company["create_time"]
    create_time = datetime.fromtimestamp(create_time_timestamp)
    today_alarm_count = 0
    alarm_count = 0
    if not alarm_data:
...

@@ -78,7 +78,7 @@ async def other_info(company_id):
        # days of safe system operation: current time - installation time + 1
        safe_run_days = (now_time - create_time).days + 1
        return today_alarm_count, safe_run_days, alarm_count
    # 5. build the response
    # if there are alarms every day, total_days +2 so the safe-run days do not end up at -1
    total_days = (now_time - create_time).days + 2
...

@@ -125,20 +125,20 @@ async def other_info_old(company_id):
            }
        },
    }
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    now_time = datetime.now()
    # get the factory installation time (create_time)
    async with MysqlUtil() as conn:
        company_sql = "select create_time from company where cid = %s"
        company = await conn.fetchone(company_sql, (company_id,))
    create_time_timestamp = company["create_time"]
    create_time = datetime.fromtimestamp(create_time_timestamp)
    today_alarm_count = 0
    alarm_count = 0
    date_buckets = es_result.get("aggregations", {}).get("date_alarms",
...

@@ -152,7 +152,7 @@ async def other_info_old(company_id):
        # days of safe system operation: current time - installation time + 1
        safe_run_days = (now_time - create_time).days + 1
        return today_alarm_count, safe_run_days, alarm_count
    # 5. build the response
    # if there are alarms every day, total_days +2 so the safe-run days do not end up at -1
    total_days = (now_time - create_time).days + 2
...

@@ -210,124 +210,7 @@ def datetime_to_timestamp(dt):
    return ans_time


async def electric_use_info(company_id):
async def electric_use_info(cid):
    """
    Electricity-use safety index
    :param company_id:
    :return:
    """
    now = datetime.now()
    end_timestamp = datetime_to_timestamp(now)
    start_timestamp = datetime_to_timestamp(
        datetime(now.year, now.month, now.day) - timedelta(30))
    score_events = [
        i for i in EVENT_TYPE_MAP.keys()
        if i not in [
            "overTemp", "overTempRange1min", "overTempRange15min",
            "overTempTrendDaily", "overTempTrendQuarterly", "over_gap_u",
            "over_rms_u", "over_gap_i", "over_rms_i", "under_rms_u",
            "over_res_cur"
        ]
    ]
    query_body = {
        "query": {
            "bool": {
                "filter": [
                    {"term": {"cid": company_id}},
                    {"range": {"time": {"gte": start_timestamp,
                                        "lte": end_timestamp, }}},
                ],
            }
        },
        "size": 0,
        "aggs": {
            # score_aggs and alarm_aggs are separate because some events do not
            # count towards the alarm score but must still appear in the alarm statistics
            "score_aggs": {
                "filter": {"terms": {"type.keyword": score_events, }},
                "aggs": {"types": {"terms": {"field": "importance"}}},
            },
            "alarm_aggs": {
                "filter": {"term": {"cid": company_id}},
                "aggs": {"types": {"terms": {"field": "importance"}}},
            },
        },
    }
    log.info("cal_score_safe_electric query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    score_buckets = (
        es_result.get("aggregations", {}).get("score_aggs", {})
        .get("types", {}).get("buckets", [])
    )
    first_alarm_cnt = 0
    second_alarm_cnt = 0
    third_alarm_cnt = 0
    for bucket in score_buckets:
        if bucket["key"] == Importance.First.value:
            first_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Second.value:
            second_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Third.value:
            third_alarm_cnt += bucket["doc_count"]
    company_point_map = await get_points([company_id])
    point_len = len(company_point_map.get(company_id) or {})
    alarm_score = (
        (first_alarm_cnt * 2 + second_alarm_cnt * 1 + third_alarm_cnt * 0.5)
        / point_len if point_len else 0
    )
    log.info(f"company_point_map:{company_point_map}, point_len:{point_len}, "
             f"alarm_score:{alarm_score}")
    if alarm_score >= 15:
        alarm_score = 15
    electric_use_score = get_electric_index(alarm_score)
    log.info("point_len={} alarm_score={} electric_use_score={}".format(
        point_len, alarm_score, electric_use_score))
    alarm_buckets = (
        es_result.get("aggregations", {}).get("alarm_aggs", {})
        .get("types", {}).get("buckets", [])
    )
    first_alarm_cnt, second_alarm_cnt, third_alarm_cnt = 0, 0, 0
    for bucket in alarm_buckets:
        if bucket["key"] == Importance.First.value:
            first_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Second.value:
            second_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Third.value:
            third_alarm_cnt += bucket["doc_count"]
    return ElectricInfo(
        first_alarm_cnt=first_alarm_cnt,
        second_alarm_cnt=second_alarm_cnt,
        third_alarm_cnt=third_alarm_cnt,
        alarm_score=alarm_score,
        electric_use_score=electric_use_score,
    )


async def electric_use_info_new15(cid):
    """1.5 electricity-use safety index"""
    now = str(datetime.now())
    start = str(datetime.now() - timedelta(30))
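Both the removed ES version and the retained version of this scoring weight level I/II/III alarms as 2/1/0.5 per monitoring point and cap the result at 15 before mapping it through get_electric_index. A minimal worked sketch of just that arithmetic (standalone, not part of the commit):

def alarm_score(first_cnt, second_cnt, third_cnt, point_len):
    # weighted alarm count per monitoring point, capped at 15 as in the diff above
    if not point_len:
        return 0
    score = (first_cnt * 2 + second_cnt * 1 + third_cnt * 0.5) / point_len
    return min(score, 15)

# e.g. 4 level-I, 10 level-II and 20 level-III alarms over 10 points:
# (8 + 10 + 10) / 10 = 2.8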
...

@@ -395,27 +278,27 @@ async def normal_rate_of_location(company_id):
    )
    for location in locations:
        location_map[location["id"]] = location
    # todo: batch hmget
    count_info_map = {
        "residual_current": {"total": 0, "normal": 0},
        "temperature": {"total": 0, "normal": 0},
    }
    print(f"len(location_map)={len(location_map)}")
    location_ids = list(location_map.keys())
    adio_currents = []
    if location_ids:
        adio_currents = await RedisUtils().hmget("adio_current",
                                                 *location_ids)
    adio_info_map = {}
    for index, item_byte in enumerate(adio_currents):
        if item_byte:
            item = json.loads(item_byte.decode())
            adio_info_map[location_ids[index]] = item
    for location_id, location_info in location_map.items():
        audio_info = adio_info_map.get(location_id)
        count_info_map[location_info["type"]]["total"] += 1
...

@@ -426,7 +309,7 @@ async def normal_rate_of_location(company_id):
                # values older than 4 hours are not counted as normal
                log.warn(f"adio_current location_id={location_id} has expire!")
                continue
            print(
                "threshold={} location_info['type'] = {} audio_info['value']={}".format(
                    location_info["threshold"], location_info["type"],
...

@@ -453,7 +336,7 @@ async def normal_rate_of_location(company_id):
            )
            + "%"
        )
    if count_info_map["residual_current"]["total"] == 0:
        residual_current_qr = "100%"
    else:
...

@@ -469,7 +352,7 @@ async def normal_rate_of_location(company_id):
            )
            + "%"
        )
    return temperature_qr, residual_current_qr
...

@@ -500,7 +383,7 @@ async def normal_rate_of_location_new15(cid):
        if item_byte:
            item = json.loads(item_byte.decode())
            adio_info_map[location_ids[index]] = item
    for location_id, location_info in location_map.items():
        audio_info = adio_info_map.get(location_id)
        count_info_map[location_info["type"]]["total"] += 1
...

@@ -531,7 +414,7 @@ async def normal_rate_of_location_new15(cid):
            )
            + "%"
        )
    if count_info_map["residual_current"]["total"] == 0:
        residual_current_qr = "100%"
    else:
...

@@ -547,7 +430,7 @@ async def normal_rate_of_location_new15(cid):
            )
            + "%"
        )
    return temperature_qr, residual_current_qr
...

@@ -562,10 +445,10 @@ async def current_load(company_id):
"and add_to_company = 1"
"and add_to_company = 1"
points
=
await
conn
.
fetchall
(
point_sql
,
args
=
(
company_id
,))
points
=
await
conn
.
fetchall
(
point_sql
,
args
=
(
company_id
,))
point_ids
=
[
p
[
"pid"
]
for
p
in
points
]
point_ids
=
[
p
[
"pid"
]
for
p
in
points
]
if
not
point_ids
:
if
not
point_ids
:
return
""
return
""
async
with
MysqlUtil
()
as
conn
:
async
with
MysqlUtil
()
as
conn
:
meter_sql
=
(
meter_sql
=
(
"SELECT pid, mid FROM change_meter_record WHERE pid in
%
s ORDER BY pid, start_time"
"SELECT pid, mid FROM change_meter_record WHERE pid in
%
s ORDER BY pid, start_time"
...

@@ -575,15 +458,15 @@ async def current_load(company_id):
    # ascending order; the map ends up holding the most recent mid by start_time
    change_meter_map = {m["pid"]: m["mid"] for m in change_meters if
                        m["mid"] is not None}
    newest_mids = list(change_meter_map.values())
    meterdata_currents = []
    if newest_mids:
        meterdata_currents = await RedisUtils().hmget(METERDATA_CURRENT_KEY,
                                                      *newest_mids)
    now_tt = int(time.time())
    if meterdata_currents:
        total = 0
        for item in meterdata_currents:
...

@@ -614,16 +497,16 @@ async def current_load_new15(cid, end_dt=None):
    if not end_dt:
        end_dt = pendulum.now(tz="Asia/Shanghai")
    start_dt = end_dt.subtract(minutes=2)
    sql = f"select last_row(mdptime, pttl) from electric_stb " \
          f"where TBNAME IN {td_mt_tables} and ts>='{str(start_dt)}' and ts " \
          f"<='{str(end_dt)}' group by tbname"
    url = f"{SETTING.stb_url}db_electric?tz=Asia/Shanghai"
    is_succ, results = await get_td_engine_data(url, sql)
    if not is_succ:
        return ""
    if not results["data"]:
        # compatibility: if the mt tables (2.0 architecture) return no data, fall back to the sid tables (1.0 architecture)
        td_s_tables = tuple(
            (f"s{data['sid'].lower()}_e" for data in datas if data["sid"]))
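The fallback table names above are derived from each monitor's sid. A small standalone sketch of that naming step, with illustrative values only:

# fallback table names: sid "ABC123" -> "sabc123_e"
datas = [{"sid": "ABC123"}, {"sid": None}, {"sid": "XYZ9"}]   # illustrative rows
td_s_tables = tuple(f"s{d['sid'].lower()}_e" for d in datas if d["sid"])
# td_s_tables == ('sabc123_e', 'sxyz9_e')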
...

@@ -633,7 +516,7 @@ async def current_load_new15(cid, end_dt=None):
        is_succ, results = await get_td_engine_data(url, sql)
        if not is_succ:
            return ""
    head = parse_td_columns(results)
    datas = []
    for res in results["data"]:
...

@@ -657,7 +540,7 @@ async def power_count_info(company_id):
    now = datetime.now()
    start_time = (now - timedelta(30)).strftime("%Y-%m-%d %H:%M:%S")
    end_time = now.strftime("%Y-%m-%d %H:%M:%S")
    max_30d_load, _time = await pttl_max(company_id, start_time, end_time, -1)
    cur_load = await current_load(company_id)
    return cur_load, max_30d_load
...

@@ -668,7 +551,7 @@ async def power_count_info_new15(cid):
    now = datetime.now()
    start_time = (now - timedelta(30)).strftime("%Y-%m-%d 00:00:00")
    end_time = now.strftime("%Y-%m-%d %H:%M:%S")
    max_30d_load, _time = await pttl_max_new15(cid, start_time, end_time, -1)
    cur_load = await current_load_new15(cid)
    return round_2(cur_load), round_2(max_30d_load)
...

@@ -704,7 +587,7 @@ async def get_max_aiao_of_filed(company_id, start_time, end_time,
        index=constants.LOCATION_15MIN_AIAO)
    value_max = es_results.get("aggregations", {}).get("value_max_max", {})
    rc_max_hits = value_max.get("hits", {}).get("hits")
    max_info, location_map = {}, {}
    if rc_max_hits:
        max_info = rc_max_hits[0]["_source"]
...

@@ -719,7 +602,7 @@ async def get_max_aiao_of_filed(company_id, start_time, end_time,
        if max_info
        else None
    )
    return MaxResidualCurrent(
        max=round(max_info["value_max"], 2) if max_info else None,
        location_name=f"{location_map['group']}_{'漏电流' if location_map['item'] == 'default' else location_map['item']}"
...

@@ -804,7 +687,7 @@ async def power_charge_price(company_id):
    yestoday_start = datetime(yestoday.year, yestoday.month, yestoday.day, 0,
                              0, 0)
    yestoday_end = yestoday_start + timedelta(1)
    es_yestoday_start = datetime.strftime(yestoday_start,
                                          "%Y-%m-%dT%H:%M:%S+08:00")
    es_yestoday_end = datetime.strftime(yestoday_end,
...

@@ -812,7 +695,7 @@ async def power_charge_price(company_id):
    yestoday_price = await get_company_charge_price(company_id,
                                                    es_yestoday_start,
                                                    es_yestoday_end)
    if now.month == 1:
        last_month = 12
        year = now.year - 1
...

@@ -827,7 +710,7 @@ async def power_charge_price(company_id):
    last_month_price = await get_company_charge_price(
        company_id, es_last_month_start, es_last_month_end
    )
    return yestoday_price, last_month_price
...

@@ -861,90 +744,7 @@ async def power_charge_price_new15(cid):
    return round_2(yestoday_price), round_2(last_month_price)


async def power_factor(company_id):
async def cal_power_factor(cid):
    """
    Home page: real-time power factor and last month's power factor
    :param company_id:
    :return:
    """
    async with MysqlUtil() as conn:
        point_sql = (
            "select pid, inlid_belongedto from point where cid=%s and add_to_company=%s"
        )
        points = await conn.fetchall(point_sql, args=(company_id, 1))
    point_ids = [i["pid"] for i in points]
    now = datetime.now()
    if now.month == 1:
        last_month_dt = datetime(year=now.year - 1, month=12, day=1)
    else:
        last_month_dt = datetime(year=now.year, month=now.month - 1, day=1)
    # the home-page power factor takes the minimum across all inlines
    async with MysqlUtil() as conn:
        sql = "SELECT inlid, `name` FROM inline WHERE cid=%s"
        inlines = await conn.fetchall(sql, args=(company_id,))
        inline_ids = [inline["inlid"] for inline in inlines]
        power_factor_results = []
        sql = "SELECT inlid, save_charge pf_cost, `kpi_x`, `save_charge` " \
              "FROM algo_power_factor_result WHERE inlid in %s and month=%s"
        if inline_ids:
            power_factor_results = await conn.fetchall(
                sql, args=(inline_ids, last_month_dt))
    pf_kpi_x_list = [
        i["kpi_x"] for i in power_factor_results
        if type(i["kpi_x"]) in [int, float]
    ]
    last_month_cos = min(pf_kpi_x_list) if len(pf_kpi_x_list) else ""
    async with EsUtil() as es:
        dt = pendulum.now(tz="Asia/Shanghai")
        tstamp = dt.int_timestamp // (15 * 60) * (15 * 60)
        dt = pendulum.from_timestamp(tstamp, tz="Asia/Shanghai")
        filters = [
            {"terms": {"pid": point_ids}},
            {"terms": {"quarter_time": [str(dt),
                                        str(dt - timedelta(minutes=15))]}},
        ]
        query_body = {
            "_source": ["pid", "quarter_time", "pttl_mean", "qttl_mean"],
            "query": {"bool": {"filter": filters}},
            "size": 10000,
            "sort": [{"pid": {"order": "asc"}},
                     {"quarter_time": {"order": "desc"}}],
        }
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_15MIN_INDEX)
    point_infos = es_result["hits"]["hits"]
    point_map = {}
    for i in point_infos:
        item = i["_source"]
        point_map.setdefault(item["pid"], []).append(
            {
                "quarter_time": item["quarter_time"],
                "pttl_mean": item["pttl_mean"],
                "qttl_mean": item["qttl_mean"],
            }
        )
    total_pttl, total_qttl = 0, 0
    for point_id, records in point_map.items():
        total_pttl += records[0]["pttl_mean"]
        total_qttl += records[0]["qttl_mean"]
    # formula for the real-time power factor
    cos_ttl = ""
    l = sqrt(total_pttl * total_pttl + total_qttl * total_qttl)
    if l:
        cos_ttl = round(total_pttl / l, 2)
    if type(last_month_cos) in [int, float]:
        last_month_cos = round(last_month_cos, 2)
    return cos_ttl, last_month_cos


async def power_factor_new15(cid):
    """Home page: real-time power factor and last month's power factor"""
    point_sql = "select pid,inlid from point where cid=%s and add_to_company=1"
    async with MysqlUtil() as conn:
...

@@ -971,7 +771,7 @@ async def power_factor_new15(cid):
        type(i["kpi_x"]) in [int, float]
    ]
    last_month_cos = min(pf_kpi_x_list) if len(pf_kpi_x_list) else ""
    dt = pendulum.now(tz="Asia/Shanghai")
    tstamp = dt.int_timestamp // (15 * 60) * (15 * 60)
    dt = pendulum.from_timestamp(tstamp, tz="Asia/Shanghai")
...

@@ -999,7 +799,7 @@ async def power_factor_new15(cid):
    l = sqrt(total_pttl * total_pttl + total_qttl * total_qttl)
    if l:
        cos_ttl = round(total_pttl / l, 2)
    if type(last_month_cos) in [int, float]:
        last_month_cos = round(last_month_cos, 2)
    return cos_ttl, last_month_cos
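Both the removed power_factor and the retained power_factor_new15 compute the same quantity here: cos phi = P / sqrt(P^2 + Q^2) over the summed 15-minute mean active and reactive power, returning "" when there is no load. A standalone sketch of that formula:

from math import sqrt

def cos_phi(total_pttl, total_qttl):
    # cos(phi) = P / sqrt(P^2 + Q^2); "" when the apparent power is zero, as above
    apparent = sqrt(total_pttl * total_pttl + total_qttl * total_qttl)
    return round(total_pttl / apparent, 2) if apparent else ""

# e.g. cos_phi(80.0, 60.0) -> 0.8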
...

@@ -1011,12 +811,12 @@ async def optimization_count_info(company_id: int):
    :param company_id:
    :return:
    """
    async with MysqlUtil() as conn:
        sql = "SELECT inlid, `name` FROM inline WHERE cid=%s"
        inlines = await conn.fetchall(sql, args=(company_id,))
        inline_ids = [inline["inlid"] for inline in inlines]
    # company's electricity usage for last month
    # now = datetime.now()
    # es_start_time = (
...

@@ -1028,12 +828,12 @@ async def optimization_count_info(company_id: int):
# "%Y-%m-%dT%H:%M:%S+08:00")
# "%Y-%m-%dT%H:%M:%S+08:00")
# power_use_info = await company_power_use_info(company_id, es_start_time,
# power_use_info = await company_power_use_info(company_id, es_start_time,
# es_end_time)
# es_end_time)
now
=
datetime
.
now
()
now
=
datetime
.
now
()
start_time
=
(
start_time
=
(
pendulum
.
datetime
(
now
.
year
,
now
.
month
,
1
)
pendulum
.
datetime
(
now
.
year
,
now
.
month
,
1
)
.
subtract
(
months
=
1
)
.
subtract
(
months
=
1
)
.
strftime
(
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
.
strftime
(
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
)
)
end_time
=
pendulum
.
datetime
(
now
.
year
,
now
.
month
,
1
)
.
strftime
(
end_time
=
pendulum
.
datetime
(
now
.
year
,
now
.
month
,
1
)
.
strftime
(
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
...

@@ -1057,13 +857,13 @@ async def optimization_count_info(company_id: int):
            }
        )
        return count_info_map
    now = datetime.now()
    if now.month == 1:
        last_month_dt = datetime(year=now.year - 1, month=12, day=1)
    else:
        last_month_dt = datetime(year=now.year, month=now.month - 1, day=1)
    # power factor
    async with MysqlUtil() as conn:
        sql = "SELECT inlid, `cos`, save_charge pf_cost, kpi_x, save_charge " \
...

@@ -1077,7 +877,7 @@ async def optimization_count_info(company_id: int):
        2,
    )
    total_pf_save = 0 if total_pf_save <= 0 else total_pf_save
    pf_kpi_x_list = [
        i["kpi_x"] for i in power_factor_results if
        type(i["kpi_x"]) in [int, float]
...

@@ -1093,20 +893,20 @@ async def optimization_count_info(company_id: int):
        pf_desc = "空间适中"
    else:
        pf_desc = "空间较大"
    count_info_map["power_factor"] = {
        "save_charge": total_pf_save if pf_kpi_x != "" else "",
        "kpi_x": pf_kpi_x,
        "desc": pf_desc,
    }
    # peak shaving / valley filling index
    async with MysqlUtil() as conn:
        sql = "select `score`, `cost_save` from `algo_plsi_result` " \
              "where `inlid` in %s and `month` = %s"
        last_month_str = datetime.strftime(last_month_dt, "%Y-%m")
        pcvfs = await conn.fetchall(sql, args=(inline_ids, last_month_str))
    pcvf_kpi_x_list = [i["score"] for i in pcvfs if
                       type(i["score"]) in [int, float]]
    pcvf_kpi_x = min(pcvf_kpi_x_list) if len(pcvf_kpi_x_list) else ""
...

@@ -1114,7 +914,7 @@ async def optimization_count_info(company_id: int):
        sum([i["cost_save"] for i in pcvfs if
             i["cost_save"] and i["cost_save"] >= 0]), 2
    )
    if pcvf_kpi_x == "":
        pcvf_desc = ""
    elif pcvf_kpi_x >= 90:
...

@@ -1125,14 +925,14 @@ async def optimization_count_info(company_id: int):
        pcvf_desc = "空间适中"
    else:
        pcvf_desc = "空间较大"
    total_pcvf_save = 0 if total_pcvf_save <= 0 else total_pcvf_save
    count_info_map["pcvf"] = {
        "save_charge": total_pcvf_save if pcvf_kpi_x != "" else "",
        "kpi_x": pcvf_kpi_x,
        "desc": pcvf_desc,
    }
    # economic operation
    async with MysqlUtil() as conn:
        sql = "select `kpi_x`, `save_charge`, `mean_load_factor` " \
...

@@ -1169,13 +969,13 @@ async def optimization_count_info(company_id: int):
        economic_desc = "空间适中"
    else:
        economic_desc = "空间较大"
    count_info_map["power_save"] = {
        "save_charge": total_economic_save if economic_kpi_x != "" else "",
        "kpi_x": economic_kpi_x,
        "desc": economic_desc,
    }
    # maximum demand
    async with MysqlUtil() as conn:
        sql = (
...

@@ -1187,7 +987,7 @@ async def optimization_count_info(company_id: int):
        )
        last_month_str = datetime.strftime(last_month_dt, "%Y-%m")
        md_spaces = await conn.fetchall(sql, args=(inline_ids, last_month_str))
    md_space_kpi_x_list = [i["kpi_x"] for i in md_spaces if
                           type(i["kpi_x"]) in [int, float]]
    md_space_kpi_x = max(md_space_kpi_x_list) if len(
...

@@ -1216,7 +1016,7 @@ async def optimization_count_info(company_id: int):
"kpi_x"
:
md_space_kpi_x
,
"kpi_x"
:
md_space_kpi_x
,
"desc"
:
md_space_desc
,
"desc"
:
md_space_desc
,
}
}
total_save_cost
=
0
total_save_cost
=
0
for
_
,
item
in
count_info_map
.
items
():
for
_
,
item
in
count_info_map
.
items
():
total_save_cost
+=
(
total_save_cost
+=
(
...

@@ -1225,12 +1025,12 @@ async def optimization_count_info(company_id: int):
    )
    save_percent = total_save_cost / month_charge if month_charge else ""
    count_info_map["save_percent"] = save_percent
    # compute maximum demand
    async with MysqlUtil() as conn:
        sql = "select `price_md`,`price_tc` from `price_policy` where `cid`=%s"
        price_policy = await conn.fetchone(sql, args=(company_id,))
    total_md_space_charge = sum(
        [i["inline_md_charge"] for i in md_spaces if i["inline_md_charge"]])
    total_md_space_p = (
...

@@ -1239,7 +1039,7 @@ async def optimization_count_info(company_id: int):
        else ""
    )
    count_info_map["md_space_p"] = total_md_space_p
    # minimum load factor for economic operation
    mean_load_factors = [
        i["mean_load_factor"] for i in economic_operations if
...

@@ -1274,12 +1074,12 @@ async def electric_use_info_sdu(cid):
            }
        }
    }
    log.info("cal_score_safe_electric query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    score_buckets = (
        es_result.get("aggregations", {}).get("alarm_aggs", {}).get("buckets",
                                                                    [])
...

@@ -1294,7 +1094,7 @@ async def electric_use_info_sdu(cid):
            second_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Third.value:
            third_alarm_cnt += bucket["doc_count"]
    company_point_map = await get_points([cid])
    point_len = len(company_point_map.get(cid) or {})
    alarm_score = (
...

@@ -1305,9 +1105,9 @@ async def electric_use_info_sdu(cid):
    )
    if alarm_score >= 15:
        alarm_score = 15
    electric_use_score = get_electric_index(alarm_score)
    log.info(
        "point_len={} alarm_score={} electric_use_score={}".format(
            point_len, alarm_score, electric_use_score
...

@@ -1359,71 +1159,6 @@ async def electric_use_info_sdu_new15(cid):
async def electric_use_info_points_sdu(start, end, points):
-    """用电安全指数, 识电u, 根据points来计算"""
-    start_es = convert_es_str(start)
-    end_es = convert_es_str(end)
-    query_body = {
-        "query": {
-            "bool": {
-                "filter": [
-                    {"terms": {"point_id": points}},
-                    {"range": {"datetime": {
-                        "gte": start_es,
-                        "lte": end_es,
-                    }}},
-                    {"terms": {"type.keyword": SDU_ALARM_LIST}}
-                ],
-            }
-        },
-        "size": 0,
-        "aggs": {
-            "alarm_aggs": {
-                "terms": {
-                    "field": "importance"
-                }
-            }
-        }
-    }
-    log.info("electric_use_info_points query_body={}".format(query_body))
-    async with EsUtil() as es:
-        es_result = await es.search_origin(body=query_body,
-                                           index=constants.POINT_1MIN_EVENT)
-    score_buckets = (
-        es_result.get("aggregations", {}).get("alarm_aggs", {}).get("buckets",
-                                                                     [])
-    )
-    first_alarm_cnt = 0
-    second_alarm_cnt = 0
-    third_alarm_cnt = 0
-    for bucket in score_buckets:
-        if bucket["key"] == Importance.First.value:
-            first_alarm_cnt += bucket["doc_count"]
-        elif bucket["key"] == Importance.Second.value:
-            second_alarm_cnt += bucket["doc_count"]
-        elif bucket["key"] == Importance.Third.value:
-            third_alarm_cnt += bucket["doc_count"]
-    alarm_score = (first_alarm_cnt * 2 + second_alarm_cnt * 1 +
-                   third_alarm_cnt * 0.5) / len(points)
-    if alarm_score >= 15:
-        alarm_score = 15
-    electric_use_score = get_electric_index(alarm_score)
-    log.info(
-        "point_len={} alarm_score={} electric_use_score={}".format(
-            len(points), alarm_score, electric_use_score
-        )
-    )
-    return ElectricInfo(
-        first_alarm_cnt=first_alarm_cnt,
-        second_alarm_cnt=second_alarm_cnt,
-        third_alarm_cnt=third_alarm_cnt,
-        alarm_score=alarm_score,
-        electric_use_score=electric_use_score,
-    )
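For reference, the removed helper above parses the ES terms-aggregation response directly. The sketch below (plain Python with illustrative values only; it assumes Importance.First/Second/Third map to 1/2/3, which this diff does not show) restates the response shape the bucket loop expects:

# Minimal sketch of the ES response shape read by the removed loop.
# Bucket keys and counts are illustrative, not project data.
es_result = {
    "aggregations": {
        "alarm_aggs": {
            "buckets": [
                {"key": 1, "doc_count": 3},   # assumed Importance.First.value
                {"key": 2, "doc_count": 4},   # assumed Importance.Second.value
                {"key": 3, "doc_count": 10},  # assumed Importance.Third.value
            ]
        }
    }
}
buckets = es_result.get("aggregations", {}).get("alarm_aggs", {}).get("buckets", [])
counts_by_importance = {b["key"]: b["doc_count"] for b in buckets}
print(counts_by_importance)  # {1: 3, 2: 4, 3: 10}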
-async def electric_use_info_points_sdu_new15(start, end, points):
    """用电安全指数, 识电u, 根据points来计算"""
    sql = f"select importance,count(*) as doc_count from point_1min_event " \
          f"where pid in %s and event_datetime BETWEEN %s and %s " \
...
@@ -1431,7 +1166,7 @@ async def electric_use_info_points_sdu_new15(start, end, points):
    async with MysqlUtil() as conn:
        results = await conn.fetchall(sql, args=(points, start, end,
                                                 SDU_ALARM_LIST))
    first_alarm_cnt = 0
    second_alarm_cnt = 0
    third_alarm_cnt = 0
...
@@ -1442,15 +1177,15 @@ async def electric_use_info_points_sdu_new15(start, end, points):
            second_alarm_cnt += result["doc_count"]
        elif result["importance"] == Importance.Third.value:
            third_alarm_cnt += result["doc_count"]
    alarm_score = (first_alarm_cnt * 2 + second_alarm_cnt * 1 +
                   third_alarm_cnt * 0.5) / len(points)
    if alarm_score >= 15:
        alarm_score = 15
    electric_use_score = get_electric_index(alarm_score)
    log.info(
        "point_len={} alarm_score={} electric_use_score={}".format(
            len(points), alarm_score, electric_use_score
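Both the removed ES variant and the retained SQL variant score alarms the same way: level-1 alarms weigh 2, level-2 weigh 1, level-3 weigh 0.5; the sum is divided by the number of points and capped at 15 before get_electric_index turns it into the final index. A small worked example (standalone sketch; get_electric_index is defined elsewhere in count_info_pds.py and is not reproduced here):

first_alarm_cnt, second_alarm_cnt, third_alarm_cnt = 3, 4, 10
points = ["pid1", "pid2", "pid3", "pid4", "pid5"]

# (3 * 2 + 4 * 1 + 10 * 0.5) / 5 = (6 + 4 + 5) / 5 = 3.0
alarm_score = (first_alarm_cnt * 2 + second_alarm_cnt * 1 +
               third_alarm_cnt * 0.5) / len(points)
alarm_score = min(alarm_score, 15)  # same effect as the `if alarm_score >= 15` cap
# electric_use_score = get_electric_index(alarm_score)  # mapping not shown in this diff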
...
@@ -1471,16 +1206,16 @@ async def optimization_count_info_new(company_id: int):
    :param company_id:
    :return:
    """
    inlines = await get_inline_by_cid(company_id)
    inline_ids = [inline["inlid"] for inline in inlines]
    # 获取公司上月用电
    now = datetime.now()
    es_start_time = (
        pendulum.datetime(now.year, now.month, 1)
        .subtract(months=1)
        .strftime("%Y-%m-%dT%H:%M:%S+08:00")
    )
    es_end_time = pendulum.datetime(now.year, now.month, 1).strftime(
        "%Y-%m-%dT%H:%M:%S+08:00")
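The window above is the previous calendar month: pendulum builds the first day of the current month, steps back one month, and formats both boundaries with a fixed +08:00 offset. A standalone sketch of what those two expressions produce (assuming pendulum is installed; the sample date is the commit date, 2023-05-29):

from datetime import datetime

import pendulum

now = datetime(2023, 5, 29)
es_start_time = (
    pendulum.datetime(now.year, now.month, 1)
    .subtract(months=1)                     # rolls over year boundaries correctly
    .strftime("%Y-%m-%dT%H:%M:%S+08:00")
)
es_end_time = pendulum.datetime(now.year, now.month, 1).strftime(
    "%Y-%m-%dT%H:%M:%S+08:00")
print(es_start_time)  # 2023-04-01T00:00:00+08:00
print(es_end_time)    # 2023-05-01T00:00:00+08:00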
...
@@ -1512,7 +1247,7 @@ async def optimization_count_info_new(company_id: int):
    last_month_dt = datetime(year=now.year, month=now.month - 1, day=1)
    last_month_str = datetime.strftime(last_month_dt, "%Y-%m")
    # 功率因数
    power_factor_results = await get_power_factor_kpi(inline_ids,
                                                      last_month_dt)
    total_pf_save = round(
...
@@ -1525,7 +1260,7 @@ async def optimization_count_info_new(company_id: int):
        (i["name"], i["kpi_x"]) for i in power_factor_results if
        type(i["kpi_x"]) in [int, float]
    ]
    if len(pf_kpi_x_list):
        pf_kpi_x_num = [pf_kpi[1] for pf_kpi in pf_kpi_x_list]
        pf_kpi_x = min(pf_kpi_x_num)
...
@@ -1560,12 +1295,12 @@ async def optimization_count_info_new(company_id: int):
    pcvfs = await get_pcvf_kpi(inline_ids, last_month_str)
    pcvf_kpi_x_list = [(i["name"], i["score"]) for i in pcvfs if
                       type(i["score"]) in [int, float]]
    if len(pcvf_kpi_x_list):
        pcvf_kpi_x_num = [pcvf_kpi[1] for pcvf_kpi in pcvf_kpi_x_list]
        pcvf_kpi_x = min(pcvf_kpi_x_num)
        pcvf_kpi_x_name = []
        if pcvf_kpi_x < 70:
            for index, kpi_num in enumerate(pcvf_kpi_x_num):
                if kpi_num < 70:
...
@@ -1579,7 +1314,7 @@ async def optimization_count_info_new(company_id: int):
                        f"引入新能源,转移高峰电量至低谷"
        else:
            pcvf_desc = "平均电价处于较低水平,请继续保持"
    else:
        pcvf_kpi_x = ""
        pcvf_desc = ""
...
@@ -1597,7 +1332,7 @@ async def optimization_count_info_new(company_id: int):
        sum([i["cost_save"] for i in pcvfs if
             i["cost_save"] and i["cost_save"] >= 0]), 2
    )
    total_pcvf_save = 0 if total_pcvf_save <= 0 else total_pcvf_save
    count_info_map["pcvf"] = {
        "save_charge": total_pcvf_save if pcvf_kpi_x != "" else "",
...
@@ -1605,7 +1340,7 @@ async def optimization_count_info_new(company_id: int):
        "desc": pcvf_desc,
        "space": pcvf_space
    }
    # 经济运行
    economic_operations = await get_economic_kpi(inline_ids, last_month_str)
    economic_kpi_x_list = [
...
@@ -1666,14 +1401,14 @@ async def optimization_count_info_new(company_id: int):
        "desc": economic_desc,
        "space": econ_space
    }
    # 容量、需量价格
    price_policy = await price_policy_by_cid(company_id)
    price_md = price_policy["price_md"] if price_policy["price_md"] else 0
    price_tc = price_policy["price_tc"] if price_policy["price_tc"] else 0
    # 最大需量
    md_spaces = await get_md_space(inline_ids, last_month_dt)
    md_space_kpi_x_list = [i["kpi_x"] for i in md_spaces if
                           type(i["kpi_x"]) in [int, float]]
    md_space_kpi_x = max(md_space_kpi_x_list) if len(
...
@@ -1703,13 +1438,13 @@ async def optimization_count_info_new(company_id: int):
            md_space_tc_runtimes[index]["tc_runtime"] * price_tc >= \
                    price_md * item["inline_md_predict"]:
                md_space_name.append(md_space_tc_runtimes[index]["name"])
    if len(md_space_name):
        md_space_desc = f"若次月负荷无较大变动,建议{'、'.join(md_space_name)}" \
                        f"选择按最大需量计费"
    else:
        md_space_desc = "不存在容改需空间"
    count_info_map["md_space"] = {
        "save_charge": total_md_space_save if md_space_kpi_x != "" else "",
        "kpi_x": md_space_kpi_x,
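The check above flags an inline for 容改需 (switching from transformer-capacity billing to maximum-demand billing) when the running capacity charge is at least as large as the charge its predicted maximum demand would incur. A standalone sketch of that comparison with illustrative prices (none of these numbers come from this project):

price_md = 38.0           # assumed demand price, yuan per kW per month
price_tc = 25.0           # assumed capacity price, yuan per kVA per month
tc_runtime = 1000         # running transformer capacity, kVA
inline_md_predict = 520   # predicted maximum demand, kW

capacity_charge = tc_runtime * price_tc        # 25000.0 under capacity billing
demand_charge = price_md * inline_md_predict   # 19760.0 under demand billing

if capacity_charge >= demand_charge:
    # mirrors the `tc_runtime * price_tc >= price_md * inline_md_predict` test above
    print("suggest switching this inline to maximum-demand billing")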
...
@@ -1751,7 +1486,7 @@ async def cid_alarm_importance_count(cid, start, end):
    point_list = [i["pid"] for i in monitor_point_list]
    es_res = await sdu_alarm_importance_dao_new15(start, end, point_list)
    es_res_key = {i["key"]: i for i in es_res}
    res_list = []
    for info in monitor_point_list:
        name = info.get("name")
...
@@ -1767,7 +1502,7 @@ async def cid_alarm_importance_count(cid, start, end):
                tmp_dic["second"] += b["doc_count"]
            elif b["key"] == Importance.Third.value:
                tmp_dic["third"] += b["doc_count"]
        tmp_dic["alarm_count"] = tmp_dic["first"] + tmp_dic["second"] + \
                                 tmp_dic["third"]
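cid_alarm_importance_count joins the per-point aggregation back onto the monitor list by pid and tallies first/second/third counts per point. A minimal sketch of that join, assuming each dao row carries a "key" (the pid) and a "buckets" list as the loop above implies, and using 1/2/3 to stand in for the Importance enum values:

# Assumed row shape for sdu_alarm_importance_dao_new15; the real shape is not
# fully visible in this diff.
es_res = [
    {"key": 101, "buckets": [{"key": 1, "doc_count": 2}, {"key": 3, "doc_count": 5}]},
]
monitor_point_list = [{"pid": 101, "name": "1#配电房"}, {"pid": 102, "name": "2#配电房"}]

es_res_key = {i["key"]: i for i in es_res}
res_list = []
for info in monitor_point_list:
    tmp_dic = {"name": info.get("name"), "first": 0, "second": 0, "third": 0}
    for b in es_res_key.get(info["pid"], {}).get("buckets", []):
        if b["key"] == 1:
            tmp_dic["first"] += b["doc_count"]
        elif b["key"] == 2:
            tmp_dic["second"] += b["doc_count"]
        elif b["key"] == 3:
            tmp_dic["third"] += b["doc_count"]
    tmp_dic["alarm_count"] = tmp_dic["first"] + tmp_dic["second"] + tmp_dic["third"]
    res_list.append(tmp_dic)
# res_list[0] -> {"name": "1#配电房", "first": 2, "second": 0, "third": 5, "alarm_count": 7}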
...
unify_api/modules/home_page/service/count_info_service.py
View file @ de51a7ff
...
@@ -2,8 +2,6 @@ import ast
from unify_api.constants import SDU_ONE_TWO_GRADE_ALARM
from pot_libs.mysql_util.mysql_util import MysqlUtil
from unify_api.constants import CO2_N
-from unify_api.modules.alarm_manager.dao.list_static_dao import \
-    sdu_alarm_aggs_date, sdu_alarm_aggs_date_importance
from unify_api.modules.carbon_neutral.service.carbon_reduce_service import \
    carbon_emission_index_service
from unify_api.modules.common.dao.common_dao import monitor_by_cid, tsp_by_cid, \
...
@@ -21,10 +19,10 @@ from unify_api.modules.home_page.dao.count_info_dao import \
    alarm_aggs_point_location
from unify_api.modules.home_page.procedures.count_info_pds import other_info, \
    electric_use_info, cid_alarm_importance_count, \
-    alarm_importance_count_total, power_factor, current_load, \
+    alarm_importance_count_total, current_load, \
    get_company_charge_price, health_status_res, carbon_status_res_web, \
-    optimization_count_info, economic_index_desc, electric_use_info_new15, \
-    power_factor_new15, current_load_new15
+    optimization_count_info, economic_index_desc, \
+    cal_power_factor, current_load_new15
from unify_api.modules.home_page.procedures.count_info_proxy_pds import \
    alarm_percentage_count, alarm_safe_power
from unify_api.modules.tsp_water.dao.drop_dust_dao import \
...
@@ -170,7 +168,8 @@ async def info_yang_chen_service(cid):
    if pm25_max_list and max(pm25_max_list) < 35:
        air_quality += 1
    # 3. 安全运行天数, 从接入平台算起,未出现一级报警则加一天
-    alarm_es = await sdu_alarm_aggs_date_importance(cid)
+    # alarm_es = await sdu_alarm_aggs_date_importance(cid)
+    alarm_es = []
    safe_operation_days = 0
    for alarm in alarm_es:
        in_bucket = alarm["importance"]["buckets"]
...
@@ -269,7 +268,7 @@ async def alarm_price_costtl_service(cid):
    # 1. 今日报警
    imp_dic = await alarm_importance_count_total(cid, today_start, today_end)
    # 2. 实时功率因数, 上月功率因数
-    cos_ttl, last_month_cos = await power_factor_new15(cid)
+    cos_ttl, last_month_cos = await cal_power_factor(cid)
    # 3. 实时负荷
    cur_load = await current_load_new15(cid)
    # 4. 平均电价
...
@@ -400,7 +399,7 @@ async def all_index_info_service(cid):
    health_index = round(health_index)
    health_status = health_status_res(health_index, "web")
    # 2. 安全指数
-    elec_info = await electric_use_info_new15(cid)
+    elec_info = await electric_use_info(cid)
    safety_index = elec_info.electric_use_score
    safety_status = safety_ratio_res(safety_index, "web")
    # 3. 碳排指数
...
unify_api/modules/home_page/views/count_info.py
View file @ de51a7ff
...
@@ -24,10 +24,10 @@ from unify_api.modules.home_page.procedures.count_info_pds import (
    normal_rate_of_location, normal_rate_of_location_new15,
    other_info, other_info_new15,
    power_count_info, power_count_info_new15,
-    electric_use_info, electric_use_info_new15,
+    electric_use_info,
    datetime_to_timestamp,
    power_charge_price, power_charge_price_new15,
-    power_factor, power_factor_new15,
+    cal_power_factor,
    optimization_count_info, optimization_count_info_new
)
from unify_api.modules.home_page.service.count_info_service import \
...
@@ -79,7 +79,7 @@ async def post_count_info(request, body: CountInfoReq) -> CountInfoResp:
        # 用电安全指数, 报警分, 近30天报警1,2,3级数目
        # electric_info = await electric_use_info(company_id)
-        electric_info = await electric_use_info_new15(company_id)
+        electric_info = await electric_use_info(company_id)
        # 昨日平均电价, 上月平均电价
        # yestoday_price, last_month_price = await power_charge_price(
...
@@ -89,7 +89,7 @@ async def post_count_info(request, body: CountInfoReq) -> CountInfoResp:
        # 实时功率因数, 上月功率因数
        # cos_ttl, last_month_cos = await power_factor(company_id)
-        cos_ttl, last_month_cos = await power_factor_new15(company_id)
+        cos_ttl, last_month_cos = await cal_power_factor(company_id)
    # 其实异常捕获这个东西最好是在框架内部做一次就够了
    except (ElasticsearchException, MySQLError, RedisError) as e:
...
unify_api/modules/home_page/views/count_info_proxy.py
View file @ de51a7ff
...
@@ -33,8 +33,7 @@ from unify_api.modules.home_page.components.count_info_proxy_cps import (
    AlarmRankingReq,
    AipResp, CisResp, CisReq,
)
-from unify_api.modules.home_page.procedures.count_info_pds import other_info, \
-    electric_use_info
+from unify_api.modules.home_page.procedures.count_info_pds import other_info
from unify_api.modules.home_page.procedures.count_info_proxy_pds import (
    security_level_count,
    alarm_percentage_count,
...
unify_api/modules/shidianu/procedures/analysis_result_service.py
View file @ de51a7ff
import json
from datetime import datetime, timedelta
from pot_libs.mysql_util.mysql_util import MysqlUtil
from pot_libs.utils.exc_util import BusinessException
from unify_api import constants
from unify_api.modules.alarm_manager.dao.list_static_dao import \
-    sdu_alarm_behavior_dao, sdu_alarm_behavior_dao_new15
+    sdu_alarm_behavior_dao
-from unify_api.modules.common.procedures.points import get_meter_by_point, \
-    get_meter_by_point_new15
+from unify_api.modules.common.procedures.points import \
+    get_meter_by_point
from unify_api.modules.home_page.procedures.count_info_pds import \
-    electric_use_info_points_sdu, electric_use_info_points_sdu_new15
+    electric_use_info_points_sdu
from unify_api.modules.shidianu.components.algorithm_cps import WcResp, AbcResp
from unify_api.modules.shidianu.dao.analysis_result_dao import \
    query_sdu_power_wave, query_sdu_recog_record
...
@@ -19,67 +18,11 @@ from unify_api.utils.time_format import last30_day_range, \
    get_start_end_by_tz_time, day_slots, get_start_end_by_tz_time_new
-async def wave_curve_service(point_id, req_date, product):
-    # 1. 获取曲线数据和slots
-    meter_info = await get_meter_by_point(point_id)
-    if not meter_info:
-        raise BusinessException(message="没有该监测点的meter信息,请联系运维人员!")
-    sid, meter_no = meter_info["sid"], meter_info["meter_no"]
-    dt = datetime.strptime(req_date + " 00:00:00", "%Y-%m-%d %H:%M:%S")
-    time_slot = [
-        datetime.strftime(dt + timedelta(minutes=i),
-                          "%Y-%m-%d %H:%M:%S").split(" ")[1][:5]
-        for i in range(1440)
-    ]
-    p_list = await get_p_list(sid, meter_no, req_date, time_slot)
-    # 2. 获取用电设备识别结果
-    start, end = get_start_end_by_tz_time(req_date)
-    device_data = await query_sdu_recog_record(point_id, start, end)
-    electric_actions = {}
-    if device_data:
-        for i in device_data:
-            recog_dt = str(i["recog_dt"])
-            recog_dt = recog_dt.split(" ")[1][:5]
-            act_info = json.loads(i["act_info"])
-            # 拼凑返回格式 04:02 违规电器,疑似电动车电池
-            tmp_dic = {}
-            (key, value), = act_info.items()
-            type_str = constants.SDU_EVENT_TYPE_MAP.get(key)
-            # 违规,大功率,正常电器, 只保留1个级别最高的展示
-            if recog_dt not in electric_actions:
-                electric_actions[recog_dt] = [
-                    {"type": type_str, "value": value, "type_str": key}
-                ]
-            else:
-                # 如果是电动车电池, 优先级最高, 同一分钟第一次已经是电动车电池continue
-                if electric_actions[recog_dt][0]["value"] == "电动车电池":
-                    continue
-                # 如果本次正常电器, 不需要处理了
-                # 如果本次违规电器, 替换原来的
-                elif key == "illegal_ele_app":
-                    electric_actions[recog_dt] = [
-                        {"type": type_str, "value": value, "type_str": key}
-                    ]
-                # 如果本次是大功率电器, 且原本是正常电器, 则替换
-                elif key == "high_power_app" and \
-                        electric_actions[recog_dt][0]["type_str"] == "normal_app":
-                    electric_actions[recog_dt] = [
-                        {"type": type_str, "value": value, "type_str": key}
-                    ]
-                # electric_actions[recog_dt].append(
-                #     {"type": type_str, "value": value})
-    return WcResp(
-        time_slot=time_slot,
-        p_slot=p_list,
-        electric_actions=electric_actions
-    )
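The removed wave_curve_service keeps only one recognition result per minute: an e-bike battery (电动车电池) beats any other result, an illegal appliance replaces anything else, and a high-power appliance only replaces a normal one. The same rule restated as a standalone helper (a sketch; the real record shape and SDU_EVENT_TYPE_MAP live elsewhere in unify_api.constants):

def pick_label(existing, candidate):
    """Return the recognition entry to keep for one minute slot."""
    if existing is None:
        return candidate
    if existing["value"] == "电动车电池":            # e-bike battery always wins
        return existing
    if candidate["type_str"] == "illegal_ele_app":   # illegal appliance replaces the rest
        return candidate
    if candidate["type_str"] == "high_power_app" and \
            existing["type_str"] == "normal_app":    # high power only replaces normal
        return candidate
    return existing


print(pick_label({"type_str": "normal_app", "value": "空调"},
                 {"type_str": "high_power_app", "value": "电磁炉"})["value"])  # 电磁炉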
async def wave_curve_service_new15(point_id, req_date, product):
    # 1,获取slots
    time_slot = day_slots()
    # 2. 获取sid
-    meter_info = await get_meter_by_point_new15(point_id)
+    meter_info = await get_meter_by_point(point_id)
    if not meter_info:
        raise BusinessException(message="没有该监测点的monitor信息,请联系运维人员!")
    mtid, meter_no = meter_info["mtid"], meter_info["meter_no"]
...
@@ -127,6 +70,7 @@ async def wave_curve_service_new15(point_id, req_date, product):
        electric_actions=electric_actions
    )


async def alarm_behavior_curve_service(point_id, req_date, product):
    # 1. 获取功率波动, 如果没有查询到功率波动,返回None
    wave_data = await query_sdu_power_wave(point_id, req_date + " 00:00:00")
...
@@ -136,9 +80,7 @@ async def alarm_behavior_curve_service(point_id, req_date, product):
        power_swing = None
    # 2. 安全评价
    start, end = last30_day_range()
-    # alarm_res = await electric_use_info_points_sdu(start, end, [point_id])
-    alarm_res = await electric_use_info_points_sdu_new15(start, end, [point_id])
+    alarm_res = await electric_use_info_points_sdu(start, end, [point_id])
    safety_eval = {"first_alarm_cnt": alarm_res.first_alarm_cnt,
                   "second_alarm_cnt": alarm_res.second_alarm_cnt,
                   "third_alarm_cnt": alarm_res.third_alarm_cnt,
...
@@ -147,7 +89,7 @@ async def alarm_behavior_curve_service(point_id, req_date, product):
                   }
    # 3. 行为统计
    # behavior_res = await sdu_alarm_behavior_dao(start, end, [point_id])
-    behavior_res = await sdu_alarm_behavior_dao_new15(start, end, [point_id])
+    behavior_res = await sdu_alarm_behavior_dao(start, end, [point_id])
    behavior_illegal_app = []
    if behavior_res:
        for i in behavior_res:
...