Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in
Toggle navigation
U
unify_api2
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
chaonan
unify_api2
Commits
3eee08fd
Commit
3eee08fd
authored
Aug 22, 2022
by
lcn
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
BUG修复
parent
121f1444
Hide whitespace changes
Inline
Side-by-side
Showing
6 changed files
with
137 additions
and
54 deletions
+137
-54
alarm_static_service.py
...api/modules/alarm_manager/service/alarm_static_service.py
+2
-2
pttl_max.py
unify_api/modules/common/procedures/pttl_max.py
+1
-1
security_info_pds.py
unify_api/modules/home_page/procedures/security_info_pds.py
+119
-40
count_info_proxy.py
unify_api/modules/home_page/views/count_info_proxy.py
+3
-3
security_info.py
unify_api/modules/home_page/views/security_info.py
+3
-4
time_format.py
unify_api/utils/time_format.py
+9
-4
No files found.
unify_api/modules/alarm_manager/service/alarm_static_service.py
View file @
3eee08fd
...
...
@@ -20,7 +20,7 @@ from unify_api.modules.common.procedures.points import points_by_storeys
from
unify_api.modules.home_page.components.security_info_cps
import
\
SecurityCountResp
,
LevelCount
,
ContentCount
,
AlarmContentDistributionResp
from
unify_api.modules.home_page.procedures.security_info_pds
import
\
alarm_co
unt_info
,
alarm_content_time_distribution
alarm_co
ntent_time_distribution
,
alarm_count_info_new15
from
unify_api.utils.common_utils
import
round_1
,
division_two
...
...
@@ -418,7 +418,7 @@ async def sdu_index_alarm_ranking_service_new15(cid, start, end, product):
async
def
zdu_level_distribution_service
(
cid
,
start
,
end
,
product
):
"""报警统计-报警等级-智电u"""
alarm_info_map
=
await
alarm_count_info
([
cid
],
start
,
end
,
"month"
)
alarm_info_map
=
await
alarm_count_info
_new15
([
cid
],
start
,
end
,
"month"
)
first_alarm
,
second_alarm
,
third_alarm
=
(
alarm_info_map
[
"first_alarm"
],
alarm_info_map
[
"second_alarm"
],
...
...
unify_api/modules/common/procedures/pttl_max.py
View file @
3eee08fd
...
...
@@ -132,7 +132,7 @@ async def pttl_max_new15(cid, start, end, point_id=None, inline_id=None):
# 根据时间范围, 返回不同时间格式
if
max_val_time
:
if
date_type
==
"day"
:
max_val_time
=
str
(
max_val_time
)[
:
10
]
max_val_time
=
str
(
max_val_time
)[
11
:
16
]
elif
date_type
==
"month"
:
max_val_time
=
str
(
max_val_time
)[
5
:
10
]
else
:
...
...
unify_api/modules/home_page/procedures/security_info_pds.py
View file @
3eee08fd
...
...
@@ -5,18 +5,22 @@ from datetime import datetime
from
pot_libs.es_util.es_utils
import
EsUtil
from
pot_libs.logger
import
log
from
pot_libs.mysql_util.mysql_util
import
MysqlUtil
from
unify_api
import
constants
from
unify_api.constants
import
Importance
,
Product
from
unify_api.modules.common.procedures.common_cps
import
(
proxy_safe_run_info
,
alarm_time_distribution
,
)
from
unify_api.utils.time_format
import
get_start_end_by_tz_time_new
,
\
proxy_power_slots
,
day_slots
async
def
alarm_count_info
(
company_ids
,
start
,
end
,
date_type
):
start_dt
=
datetime
.
strptime
(
start
,
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
end_dt
=
datetime
.
strptime
(
end
,
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
es_start_str
=
datetime
(
year
=
start_dt
.
year
,
month
=
start_dt
.
month
,
day
=
start_dt
.
day
)
.
strftime
(
es_start_str
=
datetime
(
year
=
start_dt
.
year
,
month
=
start_dt
.
month
,
day
=
start_dt
.
day
)
.
strftime
(
"
%
Y-
%
m-
%
dT
%
H:
%
M:
%
S+08:00"
)
es_end_str
=
end_dt
.
strftime
(
"
%
Y-
%
m-
%
dT
%
H:
%
M:
%
S+08:00"
)
...
...
@@ -25,7 +29,7 @@ async def alarm_count_info(company_ids, start, end, date_type):
_format
=
"yyyy-MM-dd HH:mm:ss"
_min
=
start_dt
.
strftime
(
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
_max
=
end_dt
.
strftime
(
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
else
:
# date_type == "month"
interval
=
"day"
...
...
@@ -35,10 +39,10 @@ async def alarm_count_info(company_ids, start, end, date_type):
filter_list
=
[
{
"range"
:
{
"datetime"
:
{
"gte"
:
es_start_str
,
"lte"
:
es_end_str
,
}}},
{
"terms"
:
{
"cid"
:
company_ids
}}]
query_body
=
{
"size"
:
0
,
"query"
:
{
"bool"
:
{
"filter"
:
filter_list
,}},
"query"
:
{
"bool"
:
{
"filter"
:
filter_list
,
}},
"aggs"
:
{
"alarm_cnt"
:
{
"date_histogram"
:
{
...
...
@@ -47,7 +51,7 @@ async def alarm_count_info(company_ids, start, end, date_type):
"time_zone"
:
"+08:00"
,
"format"
:
_format
,
"min_doc_count"
:
0
,
"extended_bounds"
:
{
"min"
:
_min
,
"max"
:
_max
,},
"extended_bounds"
:
{
"min"
:
_min
,
"max"
:
_max
,
},
},
"aggs"
:
{
"type_cnt"
:
{
"terms"
:
{
"field"
:
"importance"
}}},
},
...
...
@@ -55,21 +59,22 @@ async def alarm_count_info(company_ids, start, end, date_type):
"type_aggs"
:
{
"terms"
:
{
"field"
:
"type.keyword"
}},
},
}
log
.
info
(
"alarm_count_info query_body={}"
.
format
(
query_body
))
async
with
EsUtil
()
as
es
:
es_result
=
await
es
.
search_origin
(
body
=
query_body
,
index
=
constants
.
POINT_1MIN_EVENT
)
es_result
=
await
es
.
search_origin
(
body
=
query_body
,
index
=
constants
.
POINT_1MIN_EVENT
)
buckets
=
es_result
[
"aggregations"
][
"alarm_cnt"
][
"buckets"
]
first_alarm
=
{
"slots"
:
[],
"value"
:
[
0
]
*
len
(
buckets
)}
second_alarm
=
{
"slots"
:
[],
"value"
:
[
0
]
*
len
(
buckets
)}
third_alarm
=
{
"slots"
:
[],
"value"
:
[
0
]
*
len
(
buckets
)}
cid_buckets
=
es_result
[
"aggregations"
][
"cid_aggs"
][
"buckets"
]
cid_alarm_cnt_map
=
{
i
[
"key"
]:
i
[
"doc_count"
]
for
i
in
cid_buckets
}
type_buckets
=
es_result
[
"aggregations"
][
"type_aggs"
][
"buckets"
]
type_alarm_cnt_map
=
{
i
[
"key"
]:
i
[
"doc_count"
]
for
i
in
type_buckets
}
for
index
,
bucket
in
enumerate
(
buckets
):
if
date_type
==
"day"
:
time_str
=
bucket
[
"key_as_string"
][
11
:
16
]
...
...
@@ -78,7 +83,7 @@ async def alarm_count_info(company_ids, start, end, date_type):
first_alarm
[
"slots"
]
.
append
(
time_str
)
second_alarm
[
"slots"
]
.
append
(
time_str
)
third_alarm
[
"slots"
]
.
append
(
time_str
)
if
bucket
[
"type_cnt"
][
"buckets"
]:
for
item
in
bucket
[
"type_cnt"
][
"buckets"
]:
if
item
[
"key"
]
==
Importance
.
First
.
value
:
...
...
@@ -87,7 +92,68 @@ async def alarm_count_info(company_ids, start, end, date_type):
second_alarm
[
"value"
][
index
]
+=
item
[
"doc_count"
]
elif
item
[
"key"
]
==
Importance
.
Third
.
value
:
third_alarm
[
"value"
][
index
]
+=
item
[
"doc_count"
]
log
.
info
(
f
"first_alarm={first_alarm}"
)
log
.
info
(
f
"second_alarm={second_alarm}"
)
log
.
info
(
f
"third_alarm={third_alarm}"
)
return
{
"first_alarm"
:
first_alarm
,
"second_alarm"
:
second_alarm
,
"third_alarm"
:
third_alarm
,
"cid_alarm_cnt_map"
:
cid_alarm_cnt_map
,
"type_alarm_cnt_map"
:
type_alarm_cnt_map
,
}
async
def
alarm_count_info_new15
(
company_ids
,
start
,
end
,
date_type
):
if
date_type
==
"day"
:
date_fmt
=
"DATE_FORMAT(event_datetime,'
%%
H')"
slots
=
day_slots
(
'hours'
)
else
:
# date_type == "month"
date_fmt
=
"DATE_FORMAT(event_datetime,'
%%
m-
%%
d')"
slots
=
proxy_power_slots
(
start
,
end
,
"MM-DD"
,
True
)
alarm_sql
=
f
"""
select {date_fmt} date,importance,count(*) count from point_1min_event
where cid in
%
s and event_datetime between
%
s and
%
s
group by {date_fmt},importance
"""
cid_sql
=
f
"""
select cid,count(*) count from point_1min_event
where cid in
%
s and event_datetime between
%
s and
%
s
group by cid
"""
type_sql
=
f
"""
select event_type,count(*) count from point_1min_event
where cid in
%
s and event_datetime between
%
s and
%
s
group by event_type
"""
async
with
MysqlUtil
()
as
conn
:
args
=
(
company_ids
,
start
,
end
)
alarm_result
=
await
conn
.
fetchall
(
sql
=
alarm_sql
,
args
=
args
)
cid_result
=
await
conn
.
fetchall
(
sql
=
cid_sql
,
args
=
args
)
type_result
=
await
conn
.
fetchall
(
sql
=
type_sql
,
args
=
args
)
first_alarm
=
{
"slots"
:
[],
"value"
:
[
0
]
*
len
(
slots
)}
second_alarm
=
{
"slots"
:
[],
"value"
:
[
0
]
*
len
(
slots
)}
third_alarm
=
{
"slots"
:
[],
"value"
:
[
0
]
*
len
(
slots
)}
cid_alarm_cnt_map
=
{
i
[
"cid"
]:
i
[
"count"
]
for
i
in
cid_result
}
type_alarm_cnt_map
=
{
i
[
"event_type"
]:
i
[
"count"
]
for
i
in
type_result
}
for
index
,
slot
in
enumerate
(
slots
):
show_slot
=
slot
+
":00"
if
date_type
==
"day"
else
slot
first_alarm
[
"slots"
]
.
append
(
show_slot
)
second_alarm
[
"slots"
]
.
append
(
show_slot
)
third_alarm
[
"slots"
]
.
append
(
show_slot
)
for
item
in
alarm_result
:
if
item
.
get
(
"date"
)
==
slot
:
if
item
[
"importance"
]
==
Importance
.
First
.
value
:
first_alarm
[
"value"
][
index
]
+=
item
[
"count"
]
elif
item
[
"importance"
]
==
Importance
.
Second
.
value
:
second_alarm
[
"value"
][
index
]
+=
item
[
"count"
]
elif
item
[
"importance"
]
==
Importance
.
Third
.
value
:
third_alarm
[
"value"
][
index
]
+=
item
[
"count"
]
log
.
info
(
f
"first_alarm={first_alarm}"
)
log
.
info
(
f
"second_alarm={second_alarm}"
)
log
.
info
(
f
"third_alarm={third_alarm}"
)
...
...
@@ -111,7 +177,8 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
"""
start_dt
=
datetime
.
strptime
(
start
,
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
end_dt
=
datetime
.
strptime
(
end
,
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
es_start_str
=
datetime
(
year
=
start_dt
.
year
,
month
=
start_dt
.
month
,
day
=
start_dt
.
day
)
.
strftime
(
es_start_str
=
datetime
(
year
=
start_dt
.
year
,
month
=
start_dt
.
month
,
day
=
start_dt
.
day
)
.
strftime
(
"
%
Y-
%
m-
%
dT
%
H:
%
M:
%
S+08:00"
)
es_end_str
=
end_dt
.
strftime
(
"
%
Y-
%
m-
%
dT
%
H:
%
M:
%
S+08:00"
)
...
...
@@ -120,7 +187,7 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
_format
=
"yyyy-MM-dd HH:mm:ss"
_min
=
start_dt
.
strftime
(
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
_max
=
end_dt
.
strftime
(
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
else
:
# date_type == "month"
interval
=
"day"
...
...
@@ -130,10 +197,10 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
filter_list
=
[
{
"range"
:
{
"datetime"
:
{
"gte"
:
es_start_str
,
"lte"
:
es_end_str
,
}}},
{
"terms"
:
{
"cid"
:
company_ids
}}]
query_body
=
{
"size"
:
0
,
"query"
:
{
"bool"
:
{
"filter"
:
filter_list
,}},
"query"
:
{
"bool"
:
{
"filter"
:
filter_list
,
}},
"aggs"
:
{
"alarm_cnt"
:
{
"date_histogram"
:
{
...
...
@@ -142,16 +209,18 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
"time_zone"
:
"+08:00"
,
"format"
:
_format
,
"min_doc_count"
:
0
,
"extended_bounds"
:
{
"min"
:
_min
,
"max"
:
_max
,},
"extended_bounds"
:
{
"min"
:
_min
,
"max"
:
_max
,
},
},
"aggs"
:
{
"type_cnt"
:
{
"terms"
:
{
"field"
:
"type.keyword"
,
"size"
:
10000
}}},
"aggs"
:
{
"type_cnt"
:
{
"terms"
:
{
"field"
:
"type.keyword"
,
"size"
:
10000
}}},
}
},
}
log
.
info
(
"alarm_count_info query_body={}"
.
format
(
query_body
))
async
with
EsUtil
()
as
es
:
es_result
=
await
es
.
search_origin
(
body
=
query_body
,
index
=
constants
.
POINT_1MIN_EVENT
)
es_result
=
await
es
.
search_origin
(
body
=
query_body
,
index
=
constants
.
POINT_1MIN_EVENT
)
buckets
=
es_result
[
"aggregations"
][
"alarm_cnt"
][
"buckets"
]
temperature
=
{
"slots"
:
[],
"value"
:
[
0
]
*
len
(
buckets
)}
residual_current
=
{
"slots"
:
[],
"value"
:
[
0
]
*
len
(
buckets
)}
...
...
@@ -164,7 +233,7 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
"threephase_imbalance"
:
0
,
"load_rate"
:
0
,
}
for
index
,
bucket
in
enumerate
(
buckets
):
if
date_type
==
"day"
:
time_str
=
bucket
[
"key_as_string"
][
11
:
16
]
...
...
@@ -173,7 +242,7 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
temperature
[
"slots"
]
.
append
(
time_str
)
residual_current
[
"slots"
]
.
append
(
time_str
)
electric_param
[
"slots"
]
.
append
(
time_str
)
if
bucket
[
"type_cnt"
][
"buckets"
]:
for
item
in
bucket
[
"type_cnt"
][
"buckets"
]:
if
item
[
"key"
]
in
[
...
...
@@ -208,15 +277,17 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
"underPhasePF"
,
# 单相功率因数越下限
"underTotalPF"
,
# 总功率因数越下限
]:
electric_param_detail
[
"power_factor"
]
+=
item
[
"doc_count"
]
electric_param_detail
[
"power_factor"
]
+=
item
[
"doc_count"
]
elif
item
[
"key"
]
in
[
"unbalanceI"
,
# 三相电流不平衡度
"unbalanceU"
,
# 三相电压不平衡度
]:
electric_param_detail
[
"threephase_imbalance"
]
+=
item
[
"doc_count"
]
electric_param_detail
[
"threephase_imbalance"
]
+=
item
[
"doc_count"
]
elif
item
[
"key"
]
in
[
"overPR"
]:
electric_param_detail
[
"load_rate"
]
+=
item
[
"doc_count"
]
log
.
info
(
f
"temperature={temperature}"
)
log
.
info
(
f
"residual_current={residual_current}"
)
log
.
info
(
f
"electric_param={electric_param}"
)
...
...
@@ -239,7 +310,8 @@ async def alarm_summary(company_ids, start, end, date_type):
"""
start_dt
=
datetime
.
strptime
(
start
,
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
end_dt
=
datetime
.
strptime
(
end
,
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
es_start_str
=
datetime
(
year
=
start_dt
.
year
,
month
=
start_dt
.
month
,
day
=
start_dt
.
day
)
.
strftime
(
es_start_str
=
datetime
(
year
=
start_dt
.
year
,
month
=
start_dt
.
month
,
day
=
start_dt
.
day
)
.
strftime
(
"
%
Y-
%
m-
%
dT
%
H:
%
M:
%
S+08:00"
)
es_end_str
=
end_dt
.
strftime
(
"
%
Y-
%
m-
%
dT
%
H:
%
M:
%
S+08:00"
)
...
...
@@ -247,19 +319,19 @@ async def alarm_summary(company_ids, start, end, date_type):
_format
=
"yyyy-MM-dd HH:mm:ss"
_min
=
start_dt
.
strftime
(
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
_max
=
end_dt
.
strftime
(
"
%
Y-
%
m-
%
d
%
H:
%
M:
%
S"
)
else
:
# date_type == "month"
_format
=
"yyyy-MM-dd"
_min
=
start_dt
.
strftime
(
"
%
Y-
%
m-
%
d"
)
_max
=
end_dt
.
strftime
(
"
%
Y-
%
m-
%
d"
)
filter_list
=
[
{
"range"
:
{
"datetime"
:
{
"gte"
:
es_start_str
,
"lte"
:
es_end_str
,}}},
{
"range"
:
{
"datetime"
:
{
"gte"
:
es_start_str
,
"lte"
:
es_end_str
,
}}},
{
"term"
:
{
"mode"
:
"alarm"
}},
]
filter_list
.
append
({
"terms"
:
{
"cid"
:
company_ids
}})
query_body
=
{
"query"
:
{
"bool"
:
{
"filter"
:
filter_list
}},
"size"
:
0
,
...
...
@@ -281,22 +353,27 @@ async def alarm_summary(company_ids, start, end, date_type):
}
},
}
log
.
info
(
"alarm_summary query_body={}"
.
format
(
query_body
))
async
with
EsUtil
()
as
es
:
es_result
=
await
es
.
search_origin
(
body
=
query_body
,
index
=
constants
.
POINT_1MIN_EVENT
)
es_result
=
await
es
.
search_origin
(
body
=
query_body
,
index
=
constants
.
POINT_1MIN_EVENT
)
print
(
f
"es_result = {es_result}"
)
buckets
=
es_result
[
"aggregations"
][
"cid_aggs"
][
"buckets"
]
or
[]
total_alarm_cnt
,
alarm_company_cnt
=
sum
([
i
[
"doc_count"
]
for
i
in
buckets
]),
len
(
buckets
)
total_alarm_cnt
,
alarm_company_cnt
=
sum
(
[
i
[
"doc_count"
]
for
i
in
buckets
]),
len
(
buckets
)
cid_alarmcnt_list
=
[
i
[
"doc_count"
]
for
i
in
buckets
]
safe_run_map
=
await
proxy_safe_run_info
(
company_ids
,
start_time_str
=
start
,
end_time_str
=
end
)
safe_run_map
=
await
proxy_safe_run_info
(
company_ids
,
start_time_str
=
start
,
end_time_str
=
end
)
log
.
info
(
f
"alarm_summary safe_run_map======{safe_run_map}"
)
safe_run_days
=
sum
([
safe_run_map
[
cid
][
"safe_run_days"
]
for
cid
in
safe_run_map
])
safe_run_days
=
sum
(
[
safe_run_map
[
cid
][
"safe_run_days"
]
for
cid
in
safe_run_map
])
# 时间分布: 白天, 黑夜, 凌晨
time_distribution_map
=
await
alarm_time_distribution
(
company_ids
,
start
,
end
)
time_distribution_map
=
await
alarm_time_distribution
(
company_ids
,
start
,
end
)
total_valid_company
=
0
for
cid
in
safe_run_map
:
# 选择的这段时间,客户必须已经接入进来才算
...
...
@@ -305,10 +382,12 @@ async def alarm_summary(company_ids, start, end, date_type):
summary_map
=
{
"total_alarm_cnt"
:
total_alarm_cnt
,
"alarm_company_cnt"
:
alarm_company_cnt
,
"avg_alarm_cnt"
:
round
(
total_alarm_cnt
/
alarm_company_cnt
,
1
)
if
alarm_company_cnt
else
0
,
"avg_alarm_cnt"
:
round
(
total_alarm_cnt
/
alarm_company_cnt
,
1
)
if
alarm_company_cnt
else
0
,
"max_alarm_cnt"
:
max
(
cid_alarmcnt_list
)
if
cid_alarmcnt_list
else
0
,
"safe_run_days"
:
safe_run_days
,
"avg_safe_run_days"
:
round
(
safe_run_days
/
total_valid_company
,
1
)
if
total_valid_company
else
0
,
"avg_safe_run_days"
:
round
(
safe_run_days
/
total_valid_company
,
1
)
if
total_valid_company
else
0
,
"day_alarm_cnt"
:
time_distribution_map
[
"day_alarm_cnt"
],
"night_alarm_cnt"
:
time_distribution_map
[
"night_alarm_cnt"
],
"morning_alarm_cnt"
:
time_distribution_map
[
"morning_alarm_cnt"
],
...
...
unify_api/modules/home_page/views/count_info_proxy.py
View file @
3eee08fd
...
...
@@ -43,7 +43,7 @@ from unify_api.modules.home_page.procedures.count_info_proxy_pds import (
total_run_day_proxy
,
)
from
unify_api.modules.home_page.procedures.security_info_pds
import
\
alarm_count_info
alarm_count_info
_new15
from
unify_api.modules.home_page.service.count_info_service
import
\
safe_run_sdu
,
safe_run_sdu_new15
from
unify_api.modules.elec_charge.components.elec_charge_cps
import
\
...
...
@@ -225,7 +225,7 @@ async def post_reg_alarm_distribution(request,
if
product
==
Product
.
RecognitionElectric
.
value
:
user_id
=
request
.
ctx
.
user_id
cids
=
await
get_cids
(
user_id
,
product
)
alarm_info_map
=
await
alarm_count_info
(
cids
,
start
,
end
,
date_type
)
alarm_info_map
=
await
alarm_count_info
_new15
(
cids
,
start
,
end
,
date_type
)
type_alarm_cnt_map
=
alarm_info_map
[
"type_alarm_cnt_map"
]
return
AlarmDistributionResp
(
alarm_categories
=
RegAlarmCnt
(
...
...
@@ -250,7 +250,7 @@ async def post_reg_alarm_rank(request,
if
product
==
Product
.
RecognitionElectric
.
value
:
user_id
=
request
.
ctx
.
user_id
cids
=
await
get_cids
(
user_id
,
product
)
alarm_info_map
=
await
alarm_count_info
(
cids
,
start
,
end
,
date_type
)
alarm_info_map
=
await
alarm_count_info
_new15
(
cids
,
start
,
end
,
date_type
)
cid_alarm_cnt_map
=
alarm_info_map
[
"cid_alarm_cnt_map"
]
cid_info_map
=
await
get_cid_info
(
all
=
True
)
...
...
unify_api/modules/home_page/views/security_info.py
View file @
3eee08fd
...
...
@@ -19,9 +19,8 @@ from unify_api.modules.home_page.components.security_info_cps import (
AlarmSummaryResp
,
)
from
unify_api.modules.home_page.procedures.security_info_pds
import
(
alarm_count_info
,
alarm_content_time_distribution
,
alarm_summary
,
alarm_summary
,
alarm_count_info_new15
,
)
...
...
@@ -43,7 +42,7 @@ async def post_security_index(request, body: SecurityCountReq) -> SecurityCountR
elif
product
==
Product
.
RecognitionElectric
.
value
:
user_id
=
request
.
ctx
.
user_id
cids
=
await
get_cids
(
user_id
,
product
)
alarm_info_map
=
await
alarm_count_info
(
cids
,
start
,
end
,
date_type
)
alarm_info_map
=
await
alarm_count_info
_new15
(
cids
,
start
,
end
,
date_type
)
first_alarm
,
second_alarm
,
third_alarm
=
(
alarm_info_map
[
"first_alarm"
],
alarm_info_map
[
"second_alarm"
],
...
...
@@ -84,7 +83,7 @@ async def post_alarm_level_distribution(request, body: SecurityCommonReq) -> Sec
else
:
raise
BusinessException
(
message
=
f
"暂时不支持其他产品"
)
alarm_info_map
=
await
alarm_count_info
(
req_cids
,
start
,
end
,
date_type
)
alarm_info_map
=
await
alarm_count_info
_new15
(
req_cids
,
start
,
end
,
date_type
)
first_alarm
,
second_alarm
,
third_alarm
=
(
alarm_info_map
[
"first_alarm"
],
alarm_info_map
[
"second_alarm"
],
...
...
unify_api/utils/time_format.py
View file @
3eee08fd
...
...
@@ -134,14 +134,19 @@ def year_slots(start, end):
return
slots
def day_slots(type='minutes'):
    """Return the time-slot labels for one day.

    With type='minutes' (the default) this yields 1440 "HH:mm" strings,
    one per minute starting at midnight; any other value yields 24 "HH"
    strings, one per hour.

    NOTE(review): the parameter name ``type`` shadows the builtin; it is
    kept as-is to stay compatible with existing keyword callers.
    """
    midnight = my_pendulum.now().start_of('day')
    if type == 'minutes':
        unit, count, fmt = 'minutes', 1440, "HH:mm"
    else:
        unit, count, fmt = 'hours', 24, "HH"
    return [midnight.add(**{unit: step}).format(fmt) for step in range(count)]
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment