Commit 3eee08fd authored by lcn's avatar lcn

BUG修复

parent 121f1444
......@@ -20,7 +20,7 @@ from unify_api.modules.common.procedures.points import points_by_storeys
from unify_api.modules.home_page.components.security_info_cps import \
SecurityCountResp, LevelCount, ContentCount, AlarmContentDistributionResp
from unify_api.modules.home_page.procedures.security_info_pds import \
alarm_count_info, alarm_content_time_distribution
alarm_content_time_distribution, alarm_count_info_new15
from unify_api.utils.common_utils import round_1, division_two
......@@ -418,7 +418,7 @@ async def sdu_index_alarm_ranking_service_new15(cid, start, end, product):
async def zdu_level_distribution_service(cid, start, end, product):
"""报警统计-报警等级-智电u"""
alarm_info_map = await alarm_count_info([cid], start, end, "month")
alarm_info_map = await alarm_count_info_new15([cid], start, end, "month")
first_alarm, second_alarm, third_alarm = (
alarm_info_map["first_alarm"],
alarm_info_map["second_alarm"],
......
......@@ -132,7 +132,7 @@ async def pttl_max_new15(cid, start, end, point_id=None, inline_id=None):
# 根据时间范围, 返回不同时间格式
if max_val_time:
if date_type == "day":
max_val_time = str(max_val_time)[:10]
max_val_time = str(max_val_time)[11:16]
elif date_type == "month":
max_val_time = str(max_val_time)[5:10]
else:
......
......@@ -5,18 +5,22 @@ from datetime import datetime
from pot_libs.es_util.es_utils import EsUtil
from pot_libs.logger import log
from pot_libs.mysql_util.mysql_util import MysqlUtil
from unify_api import constants
from unify_api.constants import Importance, Product
from unify_api.modules.common.procedures.common_cps import (
proxy_safe_run_info,
alarm_time_distribution,
)
from unify_api.utils.time_format import get_start_end_by_tz_time_new, \
proxy_power_slots, day_slots
async def alarm_count_info(company_ids, start, end, date_type):
start_dt = datetime.strptime(start, "%Y-%m-%d %H:%M:%S")
end_dt = datetime.strptime(end, "%Y-%m-%d %H:%M:%S")
es_start_str = datetime(year=start_dt.year, month=start_dt.month, day=start_dt.day).strftime(
es_start_str = datetime(year=start_dt.year, month=start_dt.month,
day=start_dt.day).strftime(
"%Y-%m-%dT%H:%M:%S+08:00"
)
es_end_str = end_dt.strftime("%Y-%m-%dT%H:%M:%S+08:00")
......@@ -38,7 +42,7 @@ async def alarm_count_info(company_ids, start, end, date_type):
query_body = {
"size": 0,
"query": {"bool": {"filter": filter_list,}},
"query": {"bool": {"filter": filter_list, }},
"aggs": {
"alarm_cnt": {
"date_histogram": {
......@@ -47,7 +51,7 @@ async def alarm_count_info(company_ids, start, end, date_type):
"time_zone": "+08:00",
"format": _format,
"min_doc_count": 0,
"extended_bounds": {"min": _min, "max": _max,},
"extended_bounds": {"min": _min, "max": _max, },
},
"aggs": {"type_cnt": {"terms": {"field": "importance"}}},
},
......@@ -58,7 +62,8 @@ async def alarm_count_info(company_ids, start, end, date_type):
log.info("alarm_count_info query_body={}".format(query_body))
async with EsUtil() as es:
es_result = await es.search_origin(body=query_body, index=constants.POINT_1MIN_EVENT)
es_result = await es.search_origin(body=query_body,
index=constants.POINT_1MIN_EVENT)
buckets = es_result["aggregations"]["alarm_cnt"]["buckets"]
first_alarm = {"slots": [], "value": [0] * len(buckets)}
second_alarm = {"slots": [], "value": [0] * len(buckets)}
......@@ -100,6 +105,67 @@ async def alarm_count_info(company_ids, start, end, date_type):
}
async def alarm_count_info_new15(company_ids, start, end, date_type):
    """Count alarms per time slot, per company and per event type from MySQL.

    Replacement for the ES-based ``alarm_count_info`` that reads the
    ``point_1min_event`` table instead.

    Args:
        company_ids: iterable of company ids, bound to ``cid in %s``.
        start, end: inclusive ``event_datetime`` bounds (datetime strings).
        date_type: ``"day"`` buckets by hour of day; any other value
            (callers pass ``"month"``) buckets by ``"MM-DD"``.

    Returns:
        dict with ``first_alarm`` / ``second_alarm`` / ``third_alarm``
        (each ``{"slots": [...], "value": [...]}``, aligned by index),
        ``cid_alarm_cnt_map`` (cid -> total count) and
        ``type_alarm_cnt_map`` (event_type -> total count).
    """
    if date_type == "day":
        # %% keeps the literal % from being consumed by the driver's
        # parameter interpolation.
        date_fmt = "DATE_FORMAT(event_datetime,'%%H')"
        slots = day_slots('hours')
    else:
        # date_type == "month"
        date_fmt = "DATE_FORMAT(event_datetime,'%%m-%%d')"
        slots = proxy_power_slots(start, end, "MM-DD", True)
    alarm_sql = f"""
        select {date_fmt} date,importance,count(*) count from point_1min_event
        where cid in %s and event_datetime between %s and %s
        group by {date_fmt},importance
    """
    cid_sql = f"""
        select cid,count(*) count from point_1min_event
        where cid in %s and event_datetime between %s and %s
        group by cid
    """
    type_sql = f"""
        select event_type,count(*) count from point_1min_event
        where cid in %s and event_datetime between %s and %s
        group by event_type
    """
    async with MysqlUtil() as conn:
        args = (company_ids, start, end)
        alarm_result = await conn.fetchall(sql=alarm_sql, args=args)
        cid_result = await conn.fetchall(sql=cid_sql, args=args)
        type_result = await conn.fetchall(sql=type_sql, args=args)
    first_alarm = {"slots": [], "value": [0] * len(slots)}
    second_alarm = {"slots": [], "value": [0] * len(slots)}
    third_alarm = {"slots": [], "value": [0] * len(slots)}
    cid_alarm_cnt_map = {row["cid"]: row["count"] for row in cid_result}
    type_alarm_cnt_map = {row["event_type"]: row["count"]
                          for row in type_result}
    # Index counts by (slot label, importance) once so the slot loop below
    # is O(slots) instead of the original O(slots * rows) rescan of
    # alarm_result for every slot.
    level_cnt = {}
    for row in alarm_result:
        key = (row.get("date"), row["importance"])
        level_cnt[key] = level_cnt.get(key, 0) + row["count"]
    level_series = {
        Importance.First.value: first_alarm,
        Importance.Second.value: second_alarm,
        Importance.Third.value: third_alarm,
    }
    for index, slot in enumerate(slots):
        # Hour buckets are displayed as "HH:00"; month buckets stay "MM-DD".
        show_slot = slot + ":00" if date_type == "day" else slot
        for series in (first_alarm, second_alarm, third_alarm):
            series["slots"].append(show_slot)
        for importance, series in level_series.items():
            series["value"][index] += level_cnt.get((slot, importance), 0)
    log.info(f"first_alarm={first_alarm}")
    log.info(f"second_alarm={second_alarm}")
    log.info(f"third_alarm={third_alarm}")
    return {
        "first_alarm": first_alarm,
        "second_alarm": second_alarm,
        "third_alarm": third_alarm,
        "cid_alarm_cnt_map": cid_alarm_cnt_map,
        "type_alarm_cnt_map": type_alarm_cnt_map,
    }
async def alarm_content_time_distribution(company_ids, start, end, date_type):
"""
电参数,温度,漏电流时间分布
......@@ -111,7 +177,8 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
"""
start_dt = datetime.strptime(start, "%Y-%m-%d %H:%M:%S")
end_dt = datetime.strptime(end, "%Y-%m-%d %H:%M:%S")
es_start_str = datetime(year=start_dt.year, month=start_dt.month, day=start_dt.day).strftime(
es_start_str = datetime(year=start_dt.year, month=start_dt.month,
day=start_dt.day).strftime(
"%Y-%m-%dT%H:%M:%S+08:00"
)
es_end_str = end_dt.strftime("%Y-%m-%dT%H:%M:%S+08:00")
......@@ -133,7 +200,7 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
query_body = {
"size": 0,
"query": {"bool": {"filter": filter_list,}},
"query": {"bool": {"filter": filter_list, }},
"aggs": {
"alarm_cnt": {
"date_histogram": {
......@@ -142,16 +209,18 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
"time_zone": "+08:00",
"format": _format,
"min_doc_count": 0,
"extended_bounds": {"min": _min, "max": _max,},
"extended_bounds": {"min": _min, "max": _max, },
},
"aggs": {"type_cnt": {"terms": {"field": "type.keyword", "size": 10000}}},
"aggs": {"type_cnt": {
"terms": {"field": "type.keyword", "size": 10000}}},
}
},
}
log.info("alarm_count_info query_body={}".format(query_body))
async with EsUtil() as es:
es_result = await es.search_origin(body=query_body, index=constants.POINT_1MIN_EVENT)
es_result = await es.search_origin(body=query_body,
index=constants.POINT_1MIN_EVENT)
buckets = es_result["aggregations"]["alarm_cnt"]["buckets"]
temperature = {"slots": [], "value": [0] * len(buckets)}
residual_current = {"slots": [], "value": [0] * len(buckets)}
......@@ -208,12 +277,14 @@ async def alarm_content_time_distribution(company_ids, start, end, date_type):
"underPhasePF", # 单相功率因数越下限
"underTotalPF", # 总功率因数越下限
]:
electric_param_detail["power_factor"] += item["doc_count"]
electric_param_detail["power_factor"] += item[
"doc_count"]
elif item["key"] in [
"unbalanceI", # 三相电流不平衡度
"unbalanceU", # 三相电压不平衡度
]:
electric_param_detail["threephase_imbalance"] += item["doc_count"]
electric_param_detail["threephase_imbalance"] += item[
"doc_count"]
elif item["key"] in ["overPR"]:
electric_param_detail["load_rate"] += item["doc_count"]
......@@ -239,7 +310,8 @@ async def alarm_summary(company_ids, start, end, date_type):
"""
start_dt = datetime.strptime(start, "%Y-%m-%d %H:%M:%S")
end_dt = datetime.strptime(end, "%Y-%m-%d %H:%M:%S")
es_start_str = datetime(year=start_dt.year, month=start_dt.month, day=start_dt.day).strftime(
es_start_str = datetime(year=start_dt.year, month=start_dt.month,
day=start_dt.day).strftime(
"%Y-%m-%dT%H:%M:%S+08:00"
)
es_end_str = end_dt.strftime("%Y-%m-%dT%H:%M:%S+08:00")
......@@ -254,7 +326,7 @@ async def alarm_summary(company_ids, start, end, date_type):
_min = start_dt.strftime("%Y-%m-%d")
_max = end_dt.strftime("%Y-%m-%d")
filter_list = [
{"range": {"datetime": {"gte": es_start_str, "lte": es_end_str,}}},
{"range": {"datetime": {"gte": es_start_str, "lte": es_end_str, }}},
{"term": {"mode": "alarm"}},
]
......@@ -284,18 +356,23 @@ async def alarm_summary(company_ids, start, end, date_type):
log.info("alarm_summary query_body={}".format(query_body))
async with EsUtil() as es:
es_result = await es.search_origin(body=query_body, index=constants.POINT_1MIN_EVENT)
es_result = await es.search_origin(body=query_body,
index=constants.POINT_1MIN_EVENT)
print(f"es_result = {es_result}")
buckets = es_result["aggregations"]["cid_aggs"]["buckets"] or []
total_alarm_cnt, alarm_company_cnt = sum([i["doc_count"] for i in buckets]), len(buckets)
total_alarm_cnt, alarm_company_cnt = sum(
[i["doc_count"] for i in buckets]), len(buckets)
cid_alarmcnt_list = [i["doc_count"] for i in buckets]
safe_run_map = await proxy_safe_run_info(company_ids, start_time_str=start, end_time_str=end)
safe_run_map = await proxy_safe_run_info(company_ids, start_time_str=start,
end_time_str=end)
log.info(f"alarm_summary safe_run_map======{safe_run_map}")
safe_run_days = sum([safe_run_map[cid]["safe_run_days"] for cid in safe_run_map])
safe_run_days = sum(
[safe_run_map[cid]["safe_run_days"] for cid in safe_run_map])
# 时间分布: 白天, 黑夜, 凌晨
time_distribution_map = await alarm_time_distribution(company_ids, start, end)
time_distribution_map = await alarm_time_distribution(company_ids, start,
end)
total_valid_company = 0
for cid in safe_run_map:
......@@ -305,10 +382,12 @@ async def alarm_summary(company_ids, start, end, date_type):
summary_map = {
"total_alarm_cnt": total_alarm_cnt,
"alarm_company_cnt": alarm_company_cnt,
"avg_alarm_cnt": round(total_alarm_cnt / alarm_company_cnt, 1) if alarm_company_cnt else 0,
"avg_alarm_cnt": round(total_alarm_cnt / alarm_company_cnt,
1) if alarm_company_cnt else 0,
"max_alarm_cnt": max(cid_alarmcnt_list) if cid_alarmcnt_list else 0,
"safe_run_days": safe_run_days,
"avg_safe_run_days": round(safe_run_days / total_valid_company, 1) if total_valid_company else 0,
"avg_safe_run_days": round(safe_run_days / total_valid_company,
1) if total_valid_company else 0,
"day_alarm_cnt": time_distribution_map["day_alarm_cnt"],
"night_alarm_cnt": time_distribution_map["night_alarm_cnt"],
"morning_alarm_cnt": time_distribution_map["morning_alarm_cnt"],
......
......@@ -43,7 +43,7 @@ from unify_api.modules.home_page.procedures.count_info_proxy_pds import (
total_run_day_proxy,
)
from unify_api.modules.home_page.procedures.security_info_pds import \
alarm_count_info
alarm_count_info_new15
from unify_api.modules.home_page.service.count_info_service import \
safe_run_sdu, safe_run_sdu_new15
from unify_api.modules.elec_charge.components.elec_charge_cps import \
......@@ -225,7 +225,7 @@ async def post_reg_alarm_distribution(request,
if product == Product.RecognitionElectric.value:
user_id = request.ctx.user_id
cids = await get_cids(user_id, product)
alarm_info_map = await alarm_count_info(cids, start, end, date_type)
alarm_info_map = await alarm_count_info_new15(cids, start, end, date_type)
type_alarm_cnt_map = alarm_info_map["type_alarm_cnt_map"]
return AlarmDistributionResp(
alarm_categories=RegAlarmCnt(
......@@ -250,7 +250,7 @@ async def post_reg_alarm_rank(request,
if product == Product.RecognitionElectric.value:
user_id = request.ctx.user_id
cids = await get_cids(user_id, product)
alarm_info_map = await alarm_count_info(cids, start, end, date_type)
alarm_info_map = await alarm_count_info_new15(cids, start, end, date_type)
cid_alarm_cnt_map = alarm_info_map["cid_alarm_cnt_map"]
cid_info_map = await get_cid_info(all=True)
......
......@@ -19,9 +19,8 @@ from unify_api.modules.home_page.components.security_info_cps import (
AlarmSummaryResp,
)
from unify_api.modules.home_page.procedures.security_info_pds import (
alarm_count_info,
alarm_content_time_distribution,
alarm_summary,
alarm_summary, alarm_count_info_new15,
)
......@@ -43,7 +42,7 @@ async def post_security_index(request, body: SecurityCountReq) -> SecurityCountR
elif product == Product.RecognitionElectric.value:
user_id = request.ctx.user_id
cids = await get_cids(user_id, product)
alarm_info_map = await alarm_count_info(cids, start, end, date_type)
alarm_info_map = await alarm_count_info_new15(cids, start, end, date_type)
first_alarm, second_alarm, third_alarm = (
alarm_info_map["first_alarm"],
alarm_info_map["second_alarm"],
......@@ -84,7 +83,7 @@ async def post_alarm_level_distribution(request, body: SecurityCommonReq) -> Sec
else:
raise BusinessException(message=f"暂时不支持其他产品")
alarm_info_map = await alarm_count_info(req_cids, start, end, date_type)
alarm_info_map = await alarm_count_info_new15(req_cids, start, end, date_type)
first_alarm, second_alarm, third_alarm = (
alarm_info_map["first_alarm"],
alarm_info_map["second_alarm"],
......
......@@ -134,14 +134,19 @@ def year_slots(start, end):
return slots
def day_slots():
def day_slots(type='minutes'):
    """Return the time-slot labels for one day.

    With the default ``type='minutes'`` this yields 1440 "HH:mm" labels,
    one per minute from midnight; any other value yields the 24 hourly
    "HH" labels instead.
    """
    midnight = my_pendulum.now().start_of('day')
    if type == 'minutes':
        return [midnight.add(minutes=m).format("HH:mm") for m in range(1440)]
    return [midnight.add(hours=h).format("HH") for h in range(24)]
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment