Commit fd3450a0 authored by lcn

Update the Andian management edition (安电管理版)

parent 91a67783
@@ -474,3 +474,6 @@ ELECTRIC_PARAM_MAP = {
     "unbalanceU",  # three-phase voltage unbalance
     "overPR"
 }
+CST = "Asia/Shanghai"
\ No newline at end of file
@@ -145,7 +145,7 @@ async def list_alarm_zdu_service(cid, point_list, page_num, page_size, start,
     results = await list_alarm_zdu_dao_new15(cid, point_list, start, end,
                                              importance, event_type)
     real_total = len(results)
-    results = results[(page_num-1)*page_size, page_num*page_size]
+    results = results[(page_num - 1) * page_size: page_num * page_size]
     # 2. Fetch the factory and the description for each alarm type
     event_dic = await company_extend_dao(cid)
     event_dic_map = {event["key"]: event for event in event_dic}
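Note on the pagination line: with a list, `results[a, b]` (a comma) raises `TypeError: list indices must be integers or slices`; only the colon slice form works, and Python slices clamp out-of-range bounds instead of raising. A minimal sketch with a hypothetical `paginate` helper:

```python
def paginate(results, page_num, page_size):
    # Slices clamp out-of-range bounds, so a trailing page returns
    # fewer rows and a page past the end returns [].
    return results[(page_num - 1) * page_size: page_num * page_size]

assert paginate(list(range(10)), page_num=2, page_size=4) == [4, 5, 6, 7]
assert paginate(list(range(10)), page_num=3, page_size=4) == [8, 9]
assert paginate(list(range(10)), page_num=9, page_size=4) == []
```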
@@ -253,7 +253,10 @@ async def list_alarm_service_new15(cid, point_id, start, end, importance,
     if point_id:
         li.append(f"pid={point_id}")
     else:
-        li.append(f"cid={cid}")
+        if not isinstance(cid, list):
+            cid = [cid]
+        cid_where = str(tuple(cid)).replace(",)", ")")
+        li.append(f"cid in {cid_where}")
     if start and end:
         li.append(f"event_datetime BETWEEN '{start}' and '{end}'")
     if importance:
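The `str(tuple(cid)).replace(",)", ")")` dance exists because Python renders a one-element tuple with a trailing comma, which MySQL rejects inside `IN (...)`. An illustration with a hypothetical `in_clause` helper (note the values are inlined into the WHERE clause, so this is only safe for trusted integer ids):

```python
def in_clause(cids):
    # "(101,)" would be invalid SQL, so strip the trailing comma.
    return str(tuple(cids)).replace(",)", ")")

assert in_clause([101]) == "(101)"
assert in_clause([101, 102]) == "(101, 102)"
```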
@@ -270,7 +273,7 @@ async def list_alarm_service_new15(cid, point_id, start, end, importance,
         li.append(f"event_type in {str(tuple(alarm_type)).strip(',')}")
     mid_sql = " and ".join(li)
     total = await get_total_list_alarm_dao(mid_sql)
-    mid_sql2 = " and ".join(["point_1min_event."+i for i in li])
+    mid_sql2 = " and ".join(["point_1min_event." + i for i in li])
     datas = await get_list_alarm_dao(mid_sql2, page_size, page_num)
     rows = []
     for data in datas:
@@ -279,7 +282,10 @@ async def list_alarm_service_new15(cid, point_id, start, end, importance,
         type_str = constants.EVENT_TYPE_MAP.get(event_type)
         location_id = data.get("lid")
         es_id = data.get("id")
-        if location_id and (event_type in constants.TEMP_SCOPE_URL_TYPE):
+        if point_id and data.get("event_mode") == "scope":
+            url = "/scope_details?doc_id=%s" % es_id
+            redirect_type = "scope"
+        elif location_id and event_type in constants.TEMP_SCOPE_URL_TYPE:
             url = "/temp_trend?doc_id=%s" % es_id
             redirect_type = "temp_trend"
         else:
......
@@ -18,6 +18,7 @@ from unify_api.modules.alarm_manager.service.list_alarm_service import \
     wx_list_alarm_zdu_service, list_alarm_service_new15
 from unify_api.modules.common.procedures.cids import get_cid_info, get_cids, \
     get_proxy_cids
+from unify_api.modules.users.procedures.jwt_user import jwt_user
 from unify_api.utils import time_format
 from unify_api import constants
 from pot_libs.common.components.query import PageRequest, Equal, Range, Filter, \
@@ -55,7 +56,31 @@ async def post_list_alarm(req, body: PageRequest) -> ListAlarmResponse:
             alarm_type = in_group.group
         elif in_group.field == 'importance':
             importance = in_group.group
-    return await list_alarm_service_new15(cid, point_id, start, end,
+    cids = []
+    if req.json.get("product") == Product.AndianUManage.value:
+        proxy_id = req.json.get("proxy_id")
+        product = req.json.get("product")
+        user_id = jwt_user(req)
+        req_cids = req.json.get("cids")
+        # cids = await get_cids(user_id, product)
+        proxy_cids = await get_proxy_cids(user_id, product, proxy_id)
+        cids = list(set(req_cids) & set(proxy_cids))
+    if req.json.get("product") in [Product.RecognitionElectric.value,
+                                   Product.IntelligentU.value]:
+        if not cid:
+            product = req.json.get("product")
+            user_id = jwt_user(req)
+            cids = await get_cids(user_id, product)
+        else:
+            cids = [cid]
+    if not cids and cid:
+        cids = [cid]
+    if not cids:
+        raise BusinessException(message=f"你没有工厂权限")
+    return await list_alarm_service_new15(cids, point_id, start, end,
                                           importance, page_size, page_num,
                                           alarm_type)
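The handler now resolves the cid scope from the caller's entitlements: for the proxy product it intersects the requested cids with the proxy's authorized cids, otherwise it falls back to the single `cid`. A reduced, standalone sketch of that resolution (a hypothetical `resolve_cids` helper; plain lists stand in for the DAO calls, and `PermissionError` stands in for the real `BusinessException`):

```python
def resolve_cids(req_cids, proxy_cids, cid=None):
    # Keep only the requested factories the user is entitled to see.
    cids = list(set(req_cids or []) & set(proxy_cids or []))
    if not cids and cid:
        cids = [cid]
    if not cids:
        # The real handler raises BusinessException here.
        raise PermissionError("no factory permission")
    return cids

assert sorted(resolve_cids([1, 2, 3], [2, 3, 4])) == [2, 3]
assert resolve_cids([], [], cid=7) == [7]
```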
@@ -127,7 +152,8 @@ async def post_list_alarm_bak(req, body: PageRequest) -> ListAlarmResponse:
     )
     query_body = EsQuery().query(page_request)
     if not query_body.get("query"):
-        query = {"bool": {"must_not": [{"terms": {"mode.keyword": ["scope"]}}]}}
+        query = {
+            "bool": {"must_not": [{"terms": {"mode.keyword": ["scope"]}}]}}
         query_body["query"] = query
     else:
         must_not = [{"terms": {"mode.keyword": ["scope"]}}]
@@ -206,7 +232,6 @@ async def post_new_list_alarm(req, body: NlaReq) -> ListAlarmResponse:
                                      product)
 @summary("小程序消息列表")
 async def post_wx_list_alarm(req, body: WlaReq) -> ListAlarmResponse:
     # 1. Fetch request parameters
......
@@ -7,6 +7,7 @@ from pot_libs.es_util.es_utils import EsUtil
 from pot_libs.logger import log
 from pot_libs.mysql_util.mysql_util import MysqlUtil
 from unify_api import constants
+from unify_api.constants import CST
 def point_day2month(dt):
@@ -24,55 +25,35 @@ def point_day2month(dt):
 async def today_alarm_cnt(cids):
-    filters = [
-        {"terms": {"cid": cids}},
-        {"term": {"mode": "alarm"}},
-    ]
     start_time = pendulum.today(tz="Asia/Shanghai")
-    es_end_time = start_time.subtract(days=-1).format("YYYY-MM-DDTHH:mm:ss+08:00")
-    es_start_time = start_time.format("YYYY-MM-DDTHH:mm:ss+08:00")
-    filters.append({"range": {"datetime": {"gte": es_start_time, "lt": es_end_time}}})
-    query_body = {
-        "query": {"bool": {"filter": filters}},
-        "size": 0,
-        "aggs": {
-            "cid_aggs": {
-                "terms": {"field": "cid", "size": 10000},
-                "aggs": {
-                    "date_alarms": {
-                        "date_histogram": {
-                            "field": "datetime",
-                            "order": {"_key": "desc"},
-                            "min_doc_count": 0,
-                            "interval": "day",
-                            "format": "yyyy-MM-dd",
-                            "time_zone": "+08:00",
-                        }
-                    }
-                },
-            }
-        },
-    }
-    async with EsUtil() as es:
-        es_result = await es.search_origin(body=query_body, index=constants.POINT_1MIN_EVENT)
-    cid_buckets = es_result.get("aggregations", {}).get("cid_aggs", {}).get("buckets", [])
-    cid_bucket_map = {bucket["key"]: bucket for bucket in cid_buckets}
-    now_time = datetime.now()
+    es_end_time = start_time.subtract(days=-1).format("YYYY-MM-DD HH:mm:ss")
+    es_start_time = start_time.format("YYYY-MM-DD HH:mm:ss")
+    sql = f"""
+        select cid, count(*) count
+        from point_1min_event pe
+        left join event_type et on pe.event_type = et.e_type
+        where cid in %s and et.mode = 'alarm' and event_datetime >= %s
+        and event_datetime < %s
+        group by cid
+    """
+    async with MysqlUtil() as conn:
+        datas = await conn.fetchall(sql=sql,
+                                    args=(cids, es_start_time, es_end_time))
+    cid_bucket_map = {i["cid"]: i["count"] for i in datas}
     cid_alarm_map = {cid: {"today_alarm_count": 0} for cid in cids}
     for cid in cids:
-        bucket = {}
-        if cid in cid_bucket_map:
-            bucket = cid_bucket_map[cid]
-        date_alarm_bucket = bucket.get("date_alarms", {}).get("buckets", [])
-        for i in date_alarm_bucket:
-            if i["key_as_string"] == str(now_time)[:10]:
-                cid_alarm_map[cid]["today_alarm_count"] += i["doc_count"]
+        alarm_count = cid_bucket_map.get(cid) or 0
+        cid_alarm_map[cid]["today_alarm_count"] += alarm_count
     return cid_alarm_map
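A note on the `where cid in %s` bindings used throughout this commit: they rely on the driver expanding a Python list/tuple into a parenthesized, escaped value list (pymysql-style escaping, which aiomysql reuses); an empty list would render as `in ()` and fail with a syntax error. A quick check of that assumption against pymysql's converters (an internal but long-stable module):

```python
from pymysql.converters import escape_item

# A sequence is rendered as a parenthesized value list, so the final
# statement reads "... where cid in (1,2,3)".
print(escape_item([1, 2, 3], charset="utf8"))  # -> (1,2,3)
print(escape_item(42, charset="utf8"))         # -> 42
```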
-async def proxy_safe_run_info(cids, start_time_str=None, end_time_str=None):
+async def proxy_safe_run_info(cids, start_time_str=None,
+                              end_time_str=None):
     """
     Batch-fetch each factory's safe-run days and today's alarm count;
     for a month query, compute the safe-run days within that month
     :param cids:
@@ -83,83 +64,67 @@ async def proxy_safe_run_info(cids, start_time_str=None, end_time_str=None):
         # {"term": {"mode": "alarm"}},
         {"term": {"importance": 1}},
     ]
+    where = ""
+    start_dt, end_dt, start_ts, end_ts = None, None, 0, 0
+    now_dt = pendulum.now(tz=CST)
     if start_time_str and end_time_str:
-        start_dt = datetime.strptime(start_time_str, "%Y-%m-%d %H:%M:%S")
-        end_dt = datetime.strptime(end_time_str, "%Y-%m-%d %H:%M:%S")
-        now = datetime.now()
-        if end_dt > now:
-            end_dt = now
-        es_start_str = datetime(
-            year=start_dt.year, month=start_dt.month, day=start_dt.day
-        ).strftime("%Y-%m-%dT%H:%M:%S+08:00")
-        es_end_str = end_dt.strftime("%Y-%m-%dT%H:%M:%S+08:00")
-        filters.append({"range": {"datetime": {"gte": es_start_str, "lt": es_end_str}}})
-    query_body = {
-        "query": {"bool": {"filter": filters}},
-        "size": 0,
-        "aggs": {
-            "cid_aggs": {
-                "terms": {"field": "cid", "size": 10000},
-                "aggs": {
-                    "date_alarms": {
-                        "date_histogram": {
-                            "field": "datetime",
-                            "order": {"_key": "desc"},
-                            "min_doc_count": 0,
-                            "interval": "day",
-                            "format": "yyyy-MM-dd",
-                            "time_zone": "+08:00",
-                        }
-                    }
-                },
-            }
-        },
-    }
-    async with EsUtil() as es:
-        es_result = await es.search_origin(body=query_body, index=constants.POINT_1MIN_EVENT)
-    now_time = datetime.now()
+        start_dt = pendulum.parse(start_time_str)
+        end_dt = pendulum.parse(end_time_str)
+        start_ts = start_dt.int_timestamp
+        end_ts = end_dt.int_timestamp
+        now_ts = now_dt.int_timestamp
+        if end_ts > now_ts:
+            end_time_str = now_dt.format("YYYY-MM-DD HH:mm:ss")
+        where += f" and event_datetime >= '{start_time_str}' and " \
+                 f"event_datetime < '{end_time_str}' "
+    sql = f"""
+        select cid,date_format(event_datetime,"%%Y-%%m-%%d") fmt_day,
+        count(*) count
+        from point_1min_event
+        where cid in %s {where}
+        group by cid,date_format(event_datetime,"%%Y-%%m-%%d")
+    """
+    async with MysqlUtil() as conn:
+        datas = await conn.fetchall(sql=sql, args=(cids,))
     # Fetch each factory's installation time (create_time)
     async with MysqlUtil() as conn:
         company_sql = "select cid, create_time from company where cid in %s"
         companys = await conn.fetchall(company_sql, (cids,))
     create_time_timestamp_map = {
-        company["cid"]: datetime.fromtimestamp(company["create_time"]) for company in companys
+        company["cid"]: pendulum.from_timestamp(
+            company["create_time"], tz=CST) for company in companys
     }
-    cid_alarm_map = {cid: {"today_alarm_count": 0, "safe_run_days": 0} for cid in cids}
-    cid_buckets = es_result.get("aggregations", {}).get("cid_aggs", {}).get("buckets", [])
-    cid_bucket_map = {bucket["key"]: bucket for bucket in cid_buckets}
+    cid_alarm_map = {cid: {"today_alarm_count": 0, "safe_run_days": 0} for cid
+                     in cids}
+    cid_alarm_count_dict = dict()
+    for data in datas:
+        cid = data.get("cid")
+        if cid not in cid_alarm_count_dict:
+            cid_alarm_count_dict[cid] = 0
+        if data.get("count") > 0:
+            cid_alarm_count_dict[cid] += 1
     for cid in cids:
-        create_time = create_time_timestamp_map[cid]
-        total_days = (now_time - create_time).days + 1
+        create_dt = create_time_timestamp_map[cid]
+        total_days = (now_dt - create_dt).days + 1
         if start_time_str and end_time_str:
             # Over a fixed period the total-days logic differs slightly
-            total_days = (end_dt.date() - start_dt.date()).days + 1
-            if create_time > start_dt and create_time < end_dt:
-                total_days = (end_dt.date() - create_time.date()).days + 1
-            elif create_time > end_dt:
+            total_days = (end_dt - start_dt).days + 1
+            create_ts = create_dt.int_timestamp
+            if start_ts < create_ts < end_ts:
+                total_days = (end_dt - create_dt).days + 1
+            elif create_ts > end_ts:
                 total_days = 0
-        has_alarm_days = 0
-        bucket = {}
-        if cid in cid_bucket_map:
-            bucket = cid_bucket_map[cid]
-        date_alarm_bucket = bucket.get("date_alarms", {}).get("buckets", [])
-        for i in date_alarm_bucket:
-            if i["doc_count"] != 0:
-                # A day with alarms; count it towards the alarm-day total
-                has_alarm_days += 1
+        has_alarm_days = cid_alarm_count_dict.get(cid) or 0
         safe_run_days = total_days - has_alarm_days
         cid_alarm_map[cid]["safe_run_days"] = safe_run_days
         cid_alarm_map[cid]["total_days"] = total_days
     today_alarm_map = await today_alarm_cnt(cids)
     for cid in cid_alarm_map:
-        cid_alarm_map[cid]["today_alarm_count"] = today_alarm_map[cid]["today_alarm_count"]
+        cid_alarm_map[cid]["today_alarm_count"] = today_alarm_map[cid][
+            "today_alarm_count"]
     return cid_alarm_map
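The rewritten counting treats each returned (cid, day) row as one alarm day, so safe-run days are the installed (or in-window) days minus the alarm days. A self-contained sketch of that arithmetic with pendulum, using a hypothetical `safe_run_days` helper and made-up dates:

```python
import pendulum

CST = "Asia/Shanghai"

def safe_run_days(create_dt, alarm_day_cnt, now_dt):
    # Days since installation, inclusive of today.
    total_days = (now_dt - create_dt).days + 1
    # Every day without an alarm counts as a safe-run day.
    return total_days - alarm_day_cnt

create = pendulum.datetime(2023, 1, 1, tz=CST)
now = pendulum.datetime(2023, 1, 10, 12, 0, 0, tz=CST)
assert safe_run_days(create, alarm_day_cnt=3, now_dt=now) == 7
```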
@@ -178,9 +143,10 @@ async def alarm_time_distribution(company_ids, start, end):
         HOUR (pevent.event_datetime)
     """
     async with MysqlUtil() as conn:
-        datas = await conn.fetchall(sql, args=(company_ids, ))
+        datas = await conn.fetchall(sql, args=(company_ids,))
-    time_distribution_map = {"day_alarm_cnt": 0, "night_alarm_cnt": 0, "morning_alarm_cnt": 0}
+    time_distribution_map = {"day_alarm_cnt": 0, "night_alarm_cnt": 0,
+                             "morning_alarm_cnt": 0}
     for data in datas:
         hour = int(data["event_hour"])
         if hour >= 6 and hour < 18:
@@ -195,7 +161,8 @@ async def alarm_time_distribution(company_ids, start, end):
 async def alarm_time_distribution_old(company_ids, start, end):
     start_dt = datetime.strptime(start, "%Y-%m-%d %H:%M:%S")
     end_dt = datetime.strptime(end, "%Y-%m-%d %H:%M:%S")
-    es_start_str = datetime(year=start_dt.year, month=start_dt.month, day=start_dt.day).strftime(
+    es_start_str = datetime(year=start_dt.year, month=start_dt.month,
+                            day=start_dt.day).strftime(
         "%Y-%m-%dT%H:%M:%S+08:00"
     )
     es_end_str = end_dt.strftime("%Y-%m-%dT%H:%M:%S+08:00")
@@ -227,10 +194,12 @@ async def alarm_time_distribution_old(company_ids, start, end):
     log.info("alarm time distribute query_body={}".format(query_body))
     async with EsUtil() as es:
-        es_result = await es.search_origin(body=query_body, index=constants.POINT_1MIN_EVENT)
+        es_result = await es.search_origin(body=query_body,
+                                           index=constants.POINT_1MIN_EVENT)
     print(f"alarm time distribute es_result = {es_result}")
     buckets = es_result["aggregations"]["cid_aggs"]["buckets"] or []
-    time_distribution_map = {"day_alarm_cnt": 0, "night_alarm_cnt": 0, "morning_alarm_cnt": 0}
+    time_distribution_map = {"day_alarm_cnt": 0, "night_alarm_cnt": 0,
+                             "morning_alarm_cnt": 0}
     for i in buckets:
         cid_buckets = i.get("time_alarms", {}).get("buckets", [])
         for item in cid_buckets:
......
@@ -9,7 +9,8 @@ from pot_libs.logger import log
 from pot_libs.mysql_util.mysql_util import MysqlUtil
 from unify_api.modules.common.dao.health_score_dao import \
     health_score_points_aggs, get_point_dats_dao, get_mean_datas_dao
-from unify_api.modules.common.procedures.points import get_points
+from unify_api.modules.common.procedures.points import get_points, \
+    get_points_new15
 from unify_api.modules.electric.procedures.electric_util import \
     batch_get_wiring_type
 from unify_api.modules.home_page.procedures import point_inlines
@@ -117,9 +118,11 @@ async def load_health_radar(cid, param_point_id=None):
         "SELECT pid, mtid FROM point WHERE pid IN %s order by pid, create_time asc"
     )
     async with MysqlUtil() as conn:
-        change_meter_records = await conn.fetchall(sql, args=(tuple(all_point_ids),))
+        change_meter_records = await conn.fetchall(sql, args=(
+            tuple(all_point_ids),))
     point_mid_map = {
-        i["pid"]: i["mtid"] for i in change_meter_records if i["mtid"] is not None
+        i["pid"]: i["mtid"] for i in change_meter_records if
+        i["mtid"] is not None
     }
     # Fetch the standard voltage from meter_param_record
@@ -128,7 +131,8 @@ async def load_health_radar(cid, param_point_id=None):
     if all_mids:
         async with MysqlUtil() as conn:
             sql = "SELECT mtid, vc, voltage_side, ctnum FROM point WHERE mtid IN %s order by mtid, create_time asc"
-            meter_param_records = await conn.fetchall(sql, args=(tuple(all_mids),))
+            meter_param_records = await conn.fetchall(sql,
+                                                      args=(tuple(all_mids),))
     meter_param_map = {i["mtid"]: i for i in meter_param_records}
     log.info(f"all_mids={all_mids}")
@@ -253,7 +257,8 @@ async def load_health_radar(cid, param_point_id=None):
         lf_score,
     )
     if not thdu_score:
-        thdu_score = (v_score + freq_score + ubl_score + costtl_score + lf_score) / 5.0
+        thdu_score = (
+            v_score + freq_score + ubl_score + costtl_score + lf_score) / 5.0
     # Save the scores to redis
     score_info = {
@@ -441,54 +446,26 @@ async def load_manage_health_radar(cids, recent_days=30):
     # Compute the start and end of the most recent 30 days
     today = pendulum.today()
-    start_time = str(today.subtract(days=recent_days))
-    end_time = str(today.subtract(seconds=1))
-    company_point_map = await get_points(cids)
-    all_point_map = {
-        point_id: point_info
-        for i in company_point_map.values()
-        for point_id, point_info in i.items()
-    }
+    start_time = today.subtract(days=recent_days).format("YYYY-MM-DD HH:mm:ss")
+    end_time = today.subtract(seconds=1).format("YYYY-MM-DD HH:mm:ss")
+    company_point_map = await get_points_new15(cids)
+    all_point_map = dict()
+    for cid, points in company_point_map.items():
+        for pid, point_info in points.items():
+            all_point_map[pid] = point_info
     all_point_ids = list(all_point_map.keys())
-    query_body = {
-        "query": {
-            "bool": {
-                "filter": [
-                    {"terms": {"pid": all_point_ids}},
-                    {"range": {"quarter_time": {"gte": start_time, "lte": end_time}}},
-                ],
-            }
-        },
-        "size": 0,
-        "aggs": {},
-    }
-    for point_id in all_point_ids:
-        ctnum = all_point_map[point_id]["meter_param"]["ctnum"]
-        if ctnum == 3:
-            stats_items = [
-                "ua_mean",
-                "freq_mean",
-                "ubl_mean",
-                "costtl_mean",
-                "thdua_mean",
-                "lf_mean",
-            ]
-        else:
-            stats_items = ["uab_mean", "freq_mean", "ubl_mean", "costtl_mean", "lf_mean"]
-        aggs_stats = {}
-        for stats_item in stats_items:
-            aggs_stats[stats_item] = {"stats": {"field": stats_item}}
-        query_body["aggs"][f"point_id_{point_id}_aggs"] = {
-            "filter": {"term": {"pid": point_id}},
-            "aggs": aggs_stats,
-        }
-    async with EsUtil() as es:
-        es_result = await es.search_origin(body=query_body, index=POINT_15MIN_INDEX)
+    sql = f"""
+        select pid,avg(ua_mean) ua_mean,avg(uab_mean) uab_mean,avg(freq_mean) freq_mean,
+        avg(ubl_mean) ubl_mean,avg(costtl_mean) costtl_mean,
+        avg(thdua_mean) thdua_mean,avg(lf_mean) lf_mean from
+        point_15min_electric
+        where pid in %s and create_time >= %s and create_time <= %s
+        group by pid
+    """
+    async with MysqlUtil() as conn:
+        datas = await conn.fetchall(sql,
+                                    args=(all_point_ids, start_time, end_time))
+    data_map = {i['pid']: i for i in datas}
     # Compute the health index for each company separately
     company_score_map = {}
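The single GROUP BY above replaces one ES stats aggregation per pid; `data_map` then pivots the result rows so the scoring loops below can look up a point's 30-day averages in O(1). A sketch of the pivot with hypothetical rows:

```python
rows = [
    {"pid": 11, "ua_mean": 228.5, "freq_mean": 50.01},
    {"pid": 12, "ua_mean": 231.2, "freq_mean": 49.98},
]
# One dict per pid; points with no rows in the window are simply absent.
data_map = {row["pid"]: row for row in rows}
assert data_map[11]["ua_mean"] == 228.5
assert data_map.get(99) is None
```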
@@ -508,7 +485,7 @@ async def load_manage_health_radar(cids, recent_days=30):
             continue
         inline_point_ids, point_ids = [], []
         for point_id, point_item in point_map.items():
-            if point_item["inlid_belongedto"]:
+            if point_item["inlid"]:
                 inline_point_ids.append(point_id)
             else:
                 point_ids.append(point_id)
@@ -516,20 +493,15 @@ async def load_manage_health_radar(cids, recent_days=30):
         # 1. Voltage deviation score
         total, total_score = 0, 0
         for point_id in inline_point_ids + point_ids:
-            ua_mean = (
-                es_result.get("aggregations", {})
-                .get(f"point_id_{point_id}_aggs", {})
-                .get("ua_mean", {})
-                .get("avg")
-            )
+            data_point_map = data_map.get(point_id)
+            if not data_point_map:
+                continue
+            ua_mean = data_point_map.get("ua_mean")
             if ua_mean is None:
                 continue
             point_info = all_point_map[point_id]
-            if not point_info["meter_param"]:
-                # Device with no parameters; demolished?
-                continue
-            meter_param = point_info["meter_param"]
-            meter_vc, ctnum = meter_param.get("vc"), meter_param.get("ctnum") or 3
+            meter_vc, ctnum = point_info.get("vc"), point_info.get(
+                "ctnum") or 3
             if meter_vc:
                 stand_voltage = meter_vc / sqrt(3) if ctnum == 3 else meter_vc
             else:
@@ -546,12 +518,10 @@ async def load_manage_health_radar(cids, recent_days=30):
         # 2. Frequency deviation score
         total, total_score = 0, 0
         for point_id in inline_point_ids + point_ids:
-            freq_mean = (
-                es_result.get("aggregations", {})
-                .get(f"point_id_{point_id}_aggs", {})
-                .get("freq_mean", {})
-                .get("avg")
-            )
+            data_point_map = data_map.get(point_id)
+            if not data_point_map:
+                continue
+            freq_mean = data_point_map.get("freq_mean")
             if freq_mean is None:
                 continue
@@ -566,12 +536,10 @@ async def load_manage_health_radar(cids, recent_days=30):
         # 3. Three-phase [voltage] unbalance score
         total, total_score = 0, 0
         for point_id in inline_point_ids + point_ids:
-            ubl_avg = (
-                es_result.get("aggregations", {})
-                .get(f"point_id_{point_id}_aggs", {})
-                .get("ubl_mean", {})
-                .get("avg")
-            )
+            data_point_map = data_map.get(point_id)
+            if not data_point_map:
+                continue
+            ubl_avg = data_point_map.get("ubl_mean")
             if ubl_avg is None:
                 continue
             score = get_dev_score(dev_type="ubl", cur=ubl_avg)
@@ -588,12 +556,10 @@ async def load_manage_health_radar(cids, recent_days=30):
         else:
             ids = point_ids
         for point_id in ids:
-            costtl_mean = (
-                es_result.get("aggregations", {})
-                .get(f"point_id_{point_id}_aggs", {})
-                .get("costtl_mean", {})
-                .get("avg")
-            )
+            data_point_map = data_map.get(point_id)
+            if not data_point_map:
+                continue
+            costtl_mean = data_point_map.get("costtl_mean")
             if costtl_mean is None:
                 continue
             score = get_dev_score(dev_type="costtl", cur=costtl_mean)
@@ -607,12 +573,10 @@ async def load_manage_health_radar(cids, recent_days=30):
         # Voltage harmonic distortion: computed only for three-meter wiring
         # points; if every point uses two-meter wiring, take the mean of the
         # other indicators instead
         total, total_score = 0, 0
         for point_id in inline_point_ids + point_ids:
-            thdua_mean = (
-                es_result.get("aggregations", {})
-                .get(f"point_id_{point_id}_aggs", {})
-                .get("thdua_mean", {})
-                .get("avg")
-            )
+            data_point_map = data_map.get(point_id)
+            if not data_point_map:
+                continue
+            thdua_mean = data_point_map.get("thdua_mean")
             if thdua_mean is None:
                 continue
             score = get_dev_score(dev_type="thdu", cur=thdua_mean)
@@ -625,12 +589,10 @@ async def load_manage_health_radar(cids, recent_days=30):
         # 5. Load factor score
         total, total_score = 0, 0
         for point_id in inline_point_ids + point_ids:
-            lf_mean = (
-                es_result.get("aggregations", {})
-                .get(f"point_id_{point_id}_aggs", {})
-                .get("lf_mean", {})
-                .get("avg")
-            )
+            data_point_map = data_map.get(point_id)
+            if not data_point_map:
+                continue
+            lf_mean = data_point_map.get("lf_mean")
             if lf_mean is None:
                 score = 100
             else:
@@ -652,7 +614,8 @@ async def load_manage_health_radar(cids, recent_days=30):
             lf_score,
         )
         if not thdu_score:
-            thdu_score = (v_score + freq_score + ubl_score + costtl_score + lf_score) / 5.0
+            thdu_score = (
+                v_score + freq_score + ubl_score + costtl_score + lf_score) / 5.0
         company_score_map[cid] = {
             "v_score": v_score,
@@ -692,5 +655,6 @@ async def load_manage_health_index(company_score_info):
         sub_costtl = (1 - score_info["costtl_score"] / 100.0) * 20
         sub_thdu = (1 - score_info["thdu_score"] / 100.0) * 20
         sub_ubl = (1 - score_info["ubl_score"] / 100.0) * 20
-        company_index_map[cid] = 100 - sub_dev - sub_lf - sub_costtl - sub_thdu - sub_ubl
+        company_index_map[cid] = (100 - sub_dev - sub_lf - sub_costtl
+                                  - sub_thdu - sub_ubl)
     return company_index_map
@@ -56,13 +56,14 @@ async def get_points(company_ids):
 async def get_points_new15(cids):
-    sql = "SELECT p.pid,p.cid,p.inlid FROM `point` p INNER JOIN monitor m " \
-          "on m.mtid=p.mtid where p.cid in %s and m.demolished=0;"
+    sql = "SELECT p.pid,p.cid,p.inlid,vc,ctnum " \
+          "FROM `point` p INNER JOIN " \
+          "monitor m on m.mtid=p.mtid where p.cid in %s and m.demolished=0;"
     async with MysqlUtil() as conn:
         points = await conn.fetchall(sql, args=(cids,))
     company_point_map = defaultdict(dict)
     for point in points:
-        company_point_map[point["cid"]][point["pid"]] = points
+        company_point_map[point["cid"]][point["pid"]] = point
    return company_point_map
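With the `= point` fix, `get_points_new15` returns a `{cid: {pid: row}}` map (previously every slot held the entire result list), and the added `vc`/`ctnum` columns are what the radar scoring now reads. A shape sketch with hypothetical rows:

```python
from collections import defaultdict

points = [
    {"pid": 11, "cid": 1, "inlid": 5, "vc": 400, "ctnum": 3},
    {"pid": 12, "cid": 1, "inlid": 0, "vc": 400, "ctnum": 2},
]
company_point_map = defaultdict(dict)
for point in points:
    # One row per pid, grouped by company.
    company_point_map[point["cid"]][point["pid"]] = point
assert company_point_map[1][12]["ctnum"] == 2
```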
......
-import random
 from datetime import datetime, timedelta
+import pendulum
 from pot_libs.es_util.es_utils import EsUtil
 from pot_libs.logger import log
 from pot_libs.mysql_util.mysql_util import MysqlUtil
-from pot_libs.utils.pendulum_wrapper import my_pendulum
 from unify_api import constants
-from unify_api.constants import COMPANY_1DAY_POWER, EVENT_TYPE_MAP, Importance
+from unify_api.constants import COMPANY_1DAY_POWER, EVENT_TYPE_MAP, Importance, \
+    CST
 from unify_api.modules.alarm_manager.dao.list_static_dao import \
     sdu_alarm_aggs_type
 from unify_api.modules.common.procedures.cids import get_cid_info
@@ -19,19 +20,17 @@ from unify_api.modules.common.procedures.health_score import (
     load_manage_health_radar,
     load_manage_health_index,
 )
-from unify_api.modules.common.procedures.points import get_points
+from unify_api.modules.common.procedures.points import get_points, \
+    get_points_new15
 from unify_api.modules.home_page.procedures.count_info_pds import \
     datetime_to_timestamp
-from unify_api.utils.es_query_body import agg_statistics
 from unify_api.utils.time_format import last30_day_range_today


 async def proxy_alarm_score(cids):
-    now = datetime.now()
-    end_timestamp = datetime_to_timestamp(now)
-    start_timestamp = datetime_to_timestamp(
-        datetime(now.year, now.month, now.day) - timedelta(30))
+    now_dt = pendulum.now()
+    end_time = now_dt.format("YYYY-MM-DD HH:mm:ss")
+    start_time = now_dt.subtract(days=30).format("YYYY-MM-DD HH:mm:ss")
     score_events = [
         i
         for i in EVENT_TYPE_MAP.keys()
@@ -49,50 +48,38 @@ async def proxy_alarm_score(cids):
             "under_rms_u",
         ]
     ]
-    query_body = {
-        "query": {
-            "bool": {
-                "filter": [
-                    {"terms": {"cid": cids}},
-                    {"terms": {"type.keyword": score_events}},
-                    {"range": {"time": {"gte": start_timestamp,
-                                        "lte": end_timestamp}}},
-                ],
-            }
-        },
-        "size": 0,
-        "aggs": {},
-    }
-    for cid in cids:
-        query_body["aggs"][f"cid_{cid}_aggs"] = {
-            "filter": {"term": {"cid": cid}},
-            "aggs": {"importance": {"terms": {"field": "importance"}}},
-        }
-    log.info("cal_score_safe_electric query_body={}".format(query_body))
-    async with EsUtil() as es:
-        es_result = await es.search_origin(body=query_body,
-                                           index=constants.POINT_1MIN_EVENT)
+    sql = f"""
+        select cid,importance,count(*) count from point_1min_event
+        where cid in %s and event_datetime >= %s and event_datetime <= %s
+        and event_type in %s
+        group by cid,importance
+    """
+    log.info("cal_score_safe_electric sql={}".format(sql))
+    async with MysqlUtil() as conn:
+        datas = await conn.fetchall(sql, args=(
+            cids, start_time, end_time, score_events))
+    data_map = {"{}-{}".format(i["cid"], i["importance"]): i["count"] for i in
+                datas}
     cid_alarm_score_map = {}
     for cid in cids:
-        cid_aggs_info = es_result.get("aggregations", {}).get(
-            f"cid_{cid}_aggs", {})
-        if not cid_aggs_info:
-            cid_alarm_score_map["alarm_score"] = 0
-            continue
+        first_key = "{}-{}".format(cid, Importance.First.value)
+        second_key = "{}-{}".format(cid, Importance.Second.value)
+        third_key = "{}-{}".format(cid, Importance.Third.value)
+        if first_key not in data_map and second_key not in data_map and \
+                third_key not in data_map:
+            cid_alarm_score_map[cid] = 100
+            continue
         first_alarm_cnt = 0
         second_alarm_cnt = 0
         third_alarm_cnt = 0
-        for bucket in cid_aggs_info.get("importance", {}).get("buckets", []):
-            if bucket["key"] == Importance.First.value:
-                first_alarm_cnt += bucket["doc_count"]
-            elif bucket["key"] == Importance.Second.value:
-                second_alarm_cnt += bucket["doc_count"]
-            elif bucket["key"] == Importance.Third.value:
-                third_alarm_cnt += bucket["doc_count"]
-        company_point_map = await get_points(cids)
+        if first_key in data_map:
+            first_alarm_cnt = data_map.get(first_key)
+        if second_key in data_map:
+            second_alarm_cnt = data_map.get(second_key)
+        if third_key in data_map:
+            third_alarm_cnt = data_map.get(third_key)
+        company_point_map = await get_points_new15(cids)
         point_len = len(company_point_map.get(cid) or {})
         alarm_score = (
             (
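The rewritten scoring keys the grouped counts by a composite `"{cid}-{importance}"` string; a cid with none of the three keys saw no scoring events in the window and now receives the full score of 100 (the old branch wrote to a literal `"alarm_score"` key instead of the cid). A minimal sketch of the lookup with a hypothetical `importance_counts` helper and made-up counts:

```python
rows = [{"cid": 1, "importance": 1, "count": 4},
        {"cid": 1, "importance": 3, "count": 9}]
data_map = {"{}-{}".format(r["cid"], r["importance"]): r["count"]
            for r in rows}

def importance_counts(cid):
    # A missing key means no alarms of that importance in the window.
    return tuple(data_map.get("{}-{}".format(cid, imp), 0)
                 for imp in (1, 2, 3))

assert importance_counts(1) == (4, 0, 9)
assert importance_counts(2) == (0, 0, 0)
```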
@@ -138,7 +125,7 @@ async def alarm_percentage_count(cids):
         FROM
             point_1min_event pevent
         WHERE
-            cid = %s
+            cid in %s
             AND pevent.event_datetime >= '{start}'
             AND pevent.event_datetime <= '{end}'
         GROUP BY
@@ -151,7 +138,7 @@ async def alarm_percentage_count(cids):
         FROM
             point_1min_event pevent
         WHERE
-            cid = %s
+            cid in %s
             AND pevent.event_datetime >= '{start}'
             AND pevent.event_datetime <= '{end}'
         GROUP BY
@@ -301,46 +288,23 @@ async def proxy_today_alarm_cnt(cids, group_field="importance"):
     :param group_field: example: "importance"
     :return:
     """
-    now = datetime.now()
-    end_timestamp = datetime_to_timestamp(now)
-    start_timestamp = datetime_to_timestamp(
-        datetime(now.year, now.month, now.day))
-    query_body = {
-        "query": {
-            "bool": {
-                "filter": [
-                    {"terms": {"cid": cids}},
-                    {"range": {"time": {"gte": start_timestamp,
-                                        "lte": end_timestamp}}},
-                ],
-            }
-        },
-        "size": 0,
-        "aggs": {},
-    }
-    int_group_field = [
-        "importance",
-    ]
-    for cid in cids:
-        query_body["aggs"][f"cid_{cid}_aggs"] = {
-            "filter": {"term": {"cid": cid}},
-            "aggs": {
-                f"{group_field}": {
-                    "terms": {
-                        "field": f"{group_field}"
-                        if group_field in int_group_field
-                        else f"{group_field}.keyword",
-                        "size": 10000,
-                    }
-                }
-            },
-        }
-    log.info("alarm aggs query_body={}".format(query_body))
-    async with EsUtil() as es:
-        es_result = await es.search_origin(body=query_body,
-                                           index=constants.POINT_1MIN_EVENT)
+    start_time = pendulum.now(tz=CST).start_of(unit="day").format(
+        "YYYY-MM-DD HH:mm:ss")
+    end_time = pendulum.now(tz=CST).format("YYYY-MM-DD HH:mm:ss")
+    if group_field == "type":
+        # The grouping has to use the type column from event_type
+        group_field = "event_type"
+    sql = f"""
+        select cid,{group_field},count(*) count from
+        point_1min_event
+        where event_datetime >= %s and event_datetime <= %s
+        and cid in %s
+        group by cid,{group_field}
+    """
+    async with MysqlUtil() as conn:
+        datas = await conn.fetchall(sql, args=(start_time, end_time, cids))
+    log.info("alarm aggs sql={}".format(sql))
     cid_alarm_map = {
         cid: {
@@ -355,13 +319,6 @@ async def proxy_today_alarm_cnt(cids, group_field="importance"):
         }
         for cid in cids
     }
-    for cid in cids:
-        cid_buckets = (
-            es_result.get("aggregations", {})
-            .get(f"cid_{cid}_aggs", {})
-            .get(group_field, {})
-            .get("buckets", [])
-        )
     alarm_type_map = {
         Importance.First.value: "first_alarm_cnt",
         Importance.Second.value: "second_alarm_cnt",
@@ -372,10 +329,12 @@ async def proxy_today_alarm_cnt(cids, group_field="importance"):
         "power_quality_low": "electric_quantity",
         "ele_car_battery": "ele_car_battery",
     }
-        for bucket in cid_buckets:
-            if bucket["key"] in alarm_type_map:
-                _key = alarm_type_map[bucket["key"]]
-                cid_alarm_map[cid][_key] += bucket["doc_count"]
+    for data in datas:
+        cid = data.get("cid")
+        key = data.get(group_field)
+        if key in alarm_type_map:
+            value = alarm_type_map.get(key)
+            cid_alarm_map[cid][value] = data.get("count")
     return cid_alarm_map
@@ -385,53 +344,22 @@ async def proxy_today_spfv_cnt(cids):
     :param cids:
     :return:
     """
-    now = datetime.now()
-    start_time = datetime(now.year, now.month, now.day)
-    end_time = datetime(now.year, now.month, now.day, now.hour, now.minute,
-                        now.second)
-    es_start = datetime.strftime(start_time, "%Y-%m-%dT%H:%M:%S+08:00")
-    es_end = datetime.strftime(end_time, "%Y-%m-%dT%H:%M:%S+08:00")
-    query_body = {
-        "query": {
-            "bool": {
-                "filter": [
-                    {"terms": {"cid": cids}},
-                    {"range": {
-                        "quarter_time": {"gte": es_start, "lte": es_end}}},
-                ]
-            }
-        },
-        "size": 0,
-        "aggs": {
-            "cid_aggs": {
-                # Note: without "size" the aggregation returns only 10
-                # buckets, i.e. at most 10 cids come back
-                "terms": {"field": "cid", "size": 10000},
-                "aggs": {
-                    "spfv_aggs": {
-                        "terms": {"field": "spfv.keyword"},
-                        "aggs": {"kwh": {"sum": {"field": "kwh"}}},
-                    }
-                },
-            }
-        },
-    }
-    log.info("spfv aggs query_body={}".format(query_body))
-    async with EsUtil() as es:
-        es_result = await es.search_origin(body=query_body,
-                                           index=constants.COMPANY_15MIN_POWER)
-    cid_buckets = es_result.get("aggregations", {}).get("cid_aggs", {}).get(
-        "buckets", [])
+    start_time = pendulum.now(tz=CST).start_of(unit="day").format(
+        "YYYY-MM-DD HH:mm:ss")
+    end_time = pendulum.now(tz=CST).format("YYYY-MM-DD HH:mm:ss")
+    sql = f"""
+        select cid,spfv,sum(kwh) kwh from company_15min_power
+        where cid in %s and create_time >= %s and create_time <= %s
+        group by cid,spfv
+    """
+    async with MysqlUtil() as conn:
+        datas = await conn.fetchall(sql, args=(cids, start_time, end_time))
     cid_spfv_map = {cid: {"s": 0, "p": 0, "f": 0, "v": 0} for cid in cids}
-    for bucket in cid_buckets:
-        cid = bucket.get("key")
-        spvf_buckets = bucket.get("spfv_aggs", {}).get("buckets", [])
-        for i in spvf_buckets:
-            cid_spfv_map[cid][i["key"]] += round(i["kwh"]["value"])
+    for data in datas:
+        cid = data.get("cid")
+        spfv = data.get("spfv")
+        cid_spfv_map[cid][spfv] = data.get("kwh")
     log.info(f"cid_spfv_map = {cid_spfv_map}")
     return cid_spfv_map
@@ -463,7 +391,8 @@ async def proxy_map_info(cids):
     )
     # 5. Health ranking
-    company_score_map = await load_manage_health_radar(cids, recent_days=7)
+    company_score_map = await load_manage_health_radar(cids,
+                                                       recent_days=7)
     company_index_map = await load_manage_health_index(company_score_map)
     health_index_list = sorted(
         [(round(i), cid) for cid, i in company_index_map.items()], reverse=True
......
@@ -175,61 +175,19 @@ async def alarm_summary(company_ids, start, end, date_type):
     :param date_type:
     :return:
     """
-    start_dt = datetime.strptime(start, "%Y-%m-%d %H:%M:%S")
-    end_dt = datetime.strptime(end, "%Y-%m-%d %H:%M:%S")
-    es_start_str = datetime(year=start_dt.year, month=start_dt.month,
-                            day=start_dt.day).strftime(
-        "%Y-%m-%dT%H:%M:%S+08:00"
-    )
-    es_end_str = end_dt.strftime("%Y-%m-%dT%H:%M:%S+08:00")
-    if date_type == "day":
-        _format = "yyyy-MM-dd HH:mm:ss"
-        _min = start_dt.strftime("%Y-%m-%d %H:%M:%S")
-        _max = end_dt.strftime("%Y-%m-%d %H:%M:%S")
-    else:
-        # date_type == "month"
-        _format = "yyyy-MM-dd"
-        _min = start_dt.strftime("%Y-%m-%d")
-        _max = end_dt.strftime("%Y-%m-%d")
-    filter_list = [
-        {"range": {"datetime": {"gte": es_start_str, "lte": es_end_str}}},
-        {"term": {"mode": "alarm"}},
-    ]
-    filter_list.append({"terms": {"cid": company_ids}})
-    query_body = {
-        "query": {"bool": {"filter": filter_list}},
-        "size": 0,
-        "aggs": {
-            "cid_aggs": {
-                "terms": {"field": "cid", "size": 10000},
-                "aggs": {
-                    "date_alarms": {
-                        "date_histogram": {
-                            "field": "datetime",
-                            "order": {"_key": "desc"},
-                            "min_doc_count": 1,
-                            "interval": "day",
-                            "format": "yyyy-MM-dd",
-                            "time_zone": "+08:00",
-                        }
-                    }
-                },
-            }
-        },
-    }
-    log.info("alarm_summary query_body={}".format(query_body))
-    async with EsUtil() as es:
-        es_result = await es.search_origin(body=query_body,
-                                           index=constants.POINT_1MIN_EVENT)
-    print(f"es_result = {es_result}")
-    buckets = es_result["aggregations"]["cid_aggs"]["buckets"] or []
+    sql = f"""
+        select cid,count(*) count from point_1min_event
+        where cid in %s and event_mode = 'alarm' and event_datetime >= %s
+        and event_datetime <= %s
+        group by cid
+    """
+    log.info("alarm_summary sql={}".format(sql))
+    async with MysqlUtil() as conn:
+        datas = await conn.fetchall(sql, args=(company_ids, start, end))
+    print(f"datas = {datas}")
     total_alarm_cnt, alarm_company_cnt = sum(
-        [i["doc_count"] for i in buckets]), len(buckets)
-    cid_alarmcnt_list = [i["doc_count"] for i in buckets]
+        [i["count"] for i in datas]), len(datas)
+    cid_alarmcnt_list = [i["count"] for i in datas]
     safe_run_map = await proxy_safe_run_info(company_ids, start_time_str=start,
                                              end_time_str=end)
......
@@ -48,6 +48,7 @@ from unify_api.modules.home_page.service.count_info_service import \
     safe_run_sdu, safe_run_sdu_new15
 from unify_api.modules.elec_charge.components.elec_charge_cps import \
     ProductProxyReq
+from unify_api.modules.users.procedures.jwt_user import jwt_user


 @summary("代理版首页统计信息-安电U")
@@ -55,7 +56,7 @@ async def post_count_info_proxy(req) -> CountInfoProxyResp:
     # 1. Fetch cid_list
     host = req.host
     product = PRODUCT.get(host)
-    user_id = req.ctx.user_id
+    user_id = jwt_user(req)
     proxy_id = req.json.get("proxy_id")
     # cid_list = await get_cids(user_id, product)
     cid_list = await get_proxy_cids(user_id, product, proxy_id)
@@ -105,7 +106,7 @@ async def post_security_level_count(
 async def post_alarm_percentage_count(
         request, body: ProxySecurityLevelCntReq
 ) -> ProxyAlarmPercentageCntResp:
-    user_id = request.ctx.user_id
+    user_id = jwt_user(request)
     product = body.product
     req_cid = body.cid
     if not req_cid:
@@ -137,7 +138,7 @@ async def post_alarm_percentage_count(
 @summary("代理版本首页地图数据")
 async def post_proxy_map_info(request,
                               body: ProxySecurityLevelCntReq) -> ProxyIndexMapResp:
-    user_id = request.ctx.user_id
+    user_id = jwt_user(request)
     product = body.product
     req_cid = body.cid
     if not req_cid:
......
@@ -23,6 +23,7 @@ from unify_api.modules.home_page.components.security_info_cps import (
 from unify_api.modules.home_page.procedures.security_info_pds import (
     alarm_summary, alarm_count_info_new15,
 )
+from unify_api.modules.users.procedures.jwt_user import jwt_user


 @summary("获取首页今日或者近30天安全报警统计信息")
@@ -164,7 +165,7 @@ async def post_alarm_summary(request, body: SecurityCommonReq) -> AlarmSummaryRe
     if not req_cids:
         raise BusinessException(message=f"暂无工厂")
     if product == Product.AndianUManage.value:
-        user_id = request.ctx.user_id
+        user_id = jwt_user(request)
         # cids = await get_cids(user_id, product)
         proxy_id = body.proxy_id
         cids = await get_proxy_cids(user_id, product, proxy_id)
......