Commit de51a7ff authored by ZZH's avatar ZZH

remove es 2023-5-29

parent 220a3fe5
......@@ -264,57 +264,6 @@ async def sdu_alarm_importance_dao_new15(start, end, points, is_sdu=None):
async def sdu_alarm_behavior_dao(start, end, points):
    """Electricity-usage behaviour statistics (currently only illegal
    appliances).

    If more behaviour statistics are added later, group by ``type`` first
    and then by ``appliance``.

    :param start: range start, converted to ES datetime format
    :param end: range end, converted to ES datetime format
    :param points: point ids to match
    :return: ES terms buckets keyed by appliance name
    """
    es_start = convert_es_str(start)
    es_end = convert_es_str(end)
    # Restrict to the given points, the time window, and illegal-appliance
    # events only.
    filter_clauses = [
        {"terms": {"point_id": points}},
        {"range": {"datetime": {"gte": es_start, "lte": es_end}}},
        {"terms": {"type.keyword": ["illegal_ele_app"]}},
    ]
    query_body = {
        "size": 0,
        "query": {"bool": {"filter": filter_clauses}},
        "aggs": {
            "appliance": {
                "terms": {"field": "appliance.keyword", "size": 100}
            }
        },
    }
    log.info("alarm_behavior query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    return es_result["aggregations"]["appliance"]["buckets"]
async def sdu_alarm_behavior_dao_new15(start, end, points):
sql = f"SELECT appliance, count(1) doc_count " \
f"FROM `point_1min_event` WHERE pid in %s " \
f"and event_type = 'illegal_ele_app' and " \
......@@ -324,44 +273,6 @@ async def sdu_alarm_behavior_dao_new15(start, end, points):
return datas
async def sdu_alarm_aggs_date(cid):
    """SDU safe-running statistics.

    Counts one company's grade I/II alarm events per day (daily
    date_histogram over ``datetime``).

    :param cid: company id
    :return: ES date_histogram buckets, one per day
    """
    must_clauses = [
        {"term": {"cid": cid}},
        {"terms": {"type.keyword": SDU_ONE_TWO_GRADE_ALARM}},
    ]
    # min_doc_count=0 keeps empty days in the histogram output.
    daily_histogram = {
        "date_histogram": {
            "field": "datetime",
            "interval": "day",
            "time_zone": "+08:00",
            "format": "yyyy-MM-dd",
            "min_doc_count": 0,
        }
    }
    query_body = {
        "size": 0,
        "query": {"bool": {"must": must_clauses}},
        "aggs": {"date_day": daily_histogram},
    }
    log.info("query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    return es_result["aggregations"]["date_day"]["buckets"]
async def zdu_alarm_aggs_date_impotent(cid, start, end):
"""zdu求安全运行
根据每日聚合,再根据等级聚合
......@@ -383,48 +294,6 @@ async def zdu_alarm_aggs_date_impotent(cid, start, end):
return len(datas) if datas else 0
async def sdu_alarm_aggs_date_importance(cid):  # todo: dust-monitoring ES still to migrate
    """Aggregate one company's alarm events by day, then by importance.

    :param cid: company id
    :return: ES date_histogram buckets, each carrying a nested
        ``importance`` terms sub-aggregation
    """
    importance_sub_agg = {
        "importance": {
            "terms": {"field": "importance", "size": 10}
        }
    }
    query_body = {
        "size": 0,
        "query": {"bool": {"must": [{"term": {"cid": cid}}]}},
        "aggs": {
            "date_day": {
                # min_doc_count=0 keeps empty days in the output.
                "date_histogram": {
                    "field": "datetime",
                    "interval": "day",
                    "time_zone": "+08:00",
                    "format": "yyyy-MM-dd",
                    "min_doc_count": 0,
                },
                "aggs": importance_sub_agg,
            }
        },
    }
    log.info("query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    return es_result["aggregations"]["date_day"]["buckets"]
async def sdu_alarm_aggs_type(cid, start, end):
sql = f"""
SELECT
......@@ -444,51 +313,6 @@ async def sdu_alarm_aggs_type(cid, start, end):
return datas if datas else []
async def sdu_alarm_aggs_type_old(cid, start, end):
    """Aggregate one company's alarm events by type within [start, end].

    Fix: the original had the docstring string literal placed after four
    statements, where it is a dead expression rather than a docstring; it
    is now a proper function docstring.

    :param cid: company id
    :param start: "%Y-%m-%d %H:%M:%S" range start; the query is widened
        back to 00:00:00 of the start day (+08:00 zone)
    :param end: "%Y-%m-%d %H:%M:%S" range end
    :return: ES terms buckets keyed by event type
    """
    start_dt = datetime.strptime(start, "%Y-%m-%d %H:%M:%S")
    end_dt = datetime.strptime(end, "%Y-%m-%d %H:%M:%S")
    # Align the range start to midnight of the start day.
    es_start_str = datetime(year=start_dt.year, month=start_dt.month,
                            day=start_dt.day).strftime(
        "%Y-%m-%dT%H:%M:%S+08:00")
    es_end_str = end_dt.strftime("%Y-%m-%dT%H:%M:%S+08:00")
    query_body = {
        "size": 0,
        "query": {
            "bool": {
                "must": [
                    {"term": {"cid": cid}},
                    {"range": {"datetime": {"gte": es_start_str,
                                            "lte": es_end_str}}}
                ]
            }
        },
        "aggs": {
            "type": {
                "terms": {"field": "type.keyword", "size": 40}
            }
        }
    }
    log.info("query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    return es_result["aggregations"]["type"]["buckets"]
async def alarm_aggs_importance(cid, start, end):
"""按报警等级聚合"""
sql = f"""
......
......@@ -5,10 +5,10 @@ from unify_api.modules.alarm_manager.components.alarm_static_cps import \
ZsResp, TimeCount, ZasResp
from unify_api.modules.alarm_manager.dao.list_static_dao import \
sdu_alarm_statistics_dao, sdu_alarm_type_dao, sdu_alarm_importance_dao, \
sdu_alarm_behavior_dao, sdu_alarm_limit_type_dao, \
sdu_alarm_limit_type_dao, \
zdu_alarm_aggs_date_impotent, sdu_alarm_type_dao_new15, \
sdu_alarm_importance_dao_new15, sdu_alarm_statistics_dao_new15, \
sdu_alarm_behavior_dao_new15, sdu_alarm_limit_type_dao_new15
sdu_alarm_behavior_dao, sdu_alarm_limit_type_dao_new15
from unify_api.modules.alarm_manager.procedures.alarm_static_pds import \
new_alarm_content_info, risk_distribution, zdu_summary_info, \
new_alarm_content_info_new15, risk_distribution_new15, \
......@@ -294,79 +294,15 @@ async def sdu_electric_behave_service(cid, start, end, storeys, product):
return SebResp(return_data=return_list)
async def sdu_index_alarm_ranking_service(cid, start, end, product):
"""首页-报警违规排名-新版识电u, 近30天"""
async def sdu_index_alarm_rank(cid, start, end, product):
    """Home page: alarm / violation rankings for SDU (last 30 days).

    Fix: ``buck["type"]["buckets"]`` was read unguarded in the second
    pass even though the first pass explicitly guarded
    ``buck.get("type")`` — a bucket without a "type" sub-aggregation
    raised KeyError. Both passes are now merged into one guarded loop.

    :param cid: company id
    :param start: range start
    :param end: range end
    :param product: product flag (unused here; kept for the caller)
    :return: SiarResp with top-5 illegal appliances, top-5 illegal
        behaviour points, and top-5 alarm-count points
    :raises ParamException: when the company has no points
    """
    points = await points_by_cid([cid])
    if not points:
        raise ParamException(message=f"{cid}没有points")
    point_list = [i["pid"] for i in points]
    points_map = {i["pid"]: i["name"] for i in points}
    # 1. Illegal-appliance ranking.
    behavior_res = await sdu_alarm_behavior_dao(start, end, point_list)
    behavior_illegal_app = []
    if behavior_res:
        behavior_illegal_app = [
            {"name": i["key"], "value": i["doc_count"]} for i in behavior_res
        ]
        behavior_illegal_app = sorted(behavior_illegal_app,
                                      key=lambda x: x["value"], reverse=True)
    # 2. Alarm ranking and illegal-behaviour ranking per point.
    es_type_res = await sdu_alarm_limit_type_dao(start, end, point_list)
    alarm_ranking = []
    illegal_behavior = []
    for buck in es_type_res:
        point_name = points_map.get(buck["key"])
        # Alarm grade per type:
        # power_quality_low: power quality low -- grade III
        # ele_overload: line overload -- grade II
        # illegal_ele_app: illegal appliance plugged in -- grade I
        im1, im2, im3 = 0, 0, 0
        illegal_count = 0
        # Guarded access: a bucket may lack the "type" sub-aggregation.
        type_buckets = (buck.get("type") or {}).get("buckets") or []
        for im in type_buckets:
            if im["key"] == "power_quality_low":
                im3 = im["doc_count"]
            elif im["key"] == "ele_overload":
                im2 = im["doc_count"]
            elif im["key"] == "illegal_ele_app":
                im1 = im["doc_count"]
                illegal_count = im["doc_count"]
        alarm_ranking.append({
            "name": point_name, "value": buck["doc_count"],
            "im1": im1, "im2": im2, "im3": im3
        })
        illegal_behavior.append({"name": point_name, "value": illegal_count})
    # 3. Sort descending by count.
    if len(alarm_ranking) > 1:
        alarm_ranking = sorted(alarm_ranking, key=lambda x: x["value"],
                               reverse=True)
    if len(illegal_behavior) > 1:
        illegal_behavior = sorted(illegal_behavior, key=lambda x: x["value"],
                                  reverse=True)
    return SiarResp(
        illegal_app=behavior_illegal_app[:5],
        illegal_behavior=illegal_behavior[:5],
        alarm_ranking=alarm_ranking[:5]
    )
async def sdu_index_alarm_ranking_service_new15(cid, start, end, product):
points = await points_by_cid([cid])
if not points:
raise ParamException(message=f"{cid}没有points")
point_list = [i["pid"] for i in points]
points_map = {i["pid"]: i["name"] for i in points}
# 1. 违规电器排名
behavior_res = await sdu_alarm_behavior_dao_new15(start, end, point_list)
behavior_illegal_app = []
if behavior_res:
for i in behavior_res:
tmp_dic = {"name": i["appliance"], "value": i["doc_count"]}
......
......@@ -9,11 +9,11 @@ from unify_api.modules.alarm_manager.procedures.alarm_static_pds import \
from unify_api.modules.alarm_manager.service.alarm_static_service import \
sdu_alarm_statistics_service, sdu_alarm_statistics_sort_service, \
sdu_app_statistics_sort_service, sdu_electric_behave_service, \
sdu_index_alarm_ranking_service, zdu_level_distribution_service, \
zdu_level_distribution_service, \
zdu_content_distribution_service, zdu_summary_service, \
zdu_alarm_sort_service_2, sdu_alarm_statistics_sort_service_new15, \
sdu_app_statistics_sort_service_new15, \
sdu_index_alarm_ranking_service_new15
sdu_index_alarm_rank
from unify_api.modules.home_page.components.security_info_cps import \
SecurityCountResp, AlarmContentDistributionResp
from unify_api.modules.home_page.procedures.count_info_pds import \
......@@ -177,7 +177,7 @@ async def post_sdu_index_alarm_ranking(req, body: SiasReq) -> SiarResp:
product = body.product
# 最近30天, 不包含今天
start, end = last30_day_range()
return await sdu_index_alarm_ranking_service_new15(cid, start, end, product)
return await sdu_index_alarm_rank(cid, start, end, product)
@summary("报警统计-报警等级-智电u")
......
......@@ -21,26 +21,26 @@ async def get_points(company_ids):
company_point_ids_map = defaultdict(list)
for point in points:
company_point_ids_map[point["cid"]].append(point["pid"])
point_map = {i["pid"]: i for i in points}
point_ids = list(point_map.keys())
pid_field, start_time_field = "pid", "start_time"
sql = f"SELECT pid, mid FROM change_meter_record WHERE pid in %s ORDER BY {pid_field}, {start_time_field}"
records = await conn.fetchall(sql, args=(point_ids,))
newest_point_meter_relation = {i["pid"]: i["mid"] for i in records if
i["mid"]}
valid_mids = list(newest_point_meter_relation.values())
newest_record_map = {i["pid"]: point_map.get(i["pid"]) for i in records
if i["mid"]}
# 根据有效的meter id查询meter参数
async with MysqlUtil() as conn:
mid_field, start_time_field = "mid", "start_time"
mp_sql = f"SELECT vc, mid, ctnum FROM meter_param_record WHERE mid in %s ORDER BY {mid_field}, {start_time_field}"
mps = await conn.fetchall(mp_sql, args=(valid_mids,))
meter_param_map = {i["mid"]: i for i in mps}
for cid, point_ids in company_point_ids_map.items():
for point_id in point_ids:
if point_id in newest_record_map:
......@@ -85,23 +85,6 @@ async def proxy_points(cid_list):
async def get_meter_by_point(point_id):
    """Return the meter record (sid, meter_no) for a monitoring point.

    Looks up the most recently bound meter for the point in
    change_meter_record, then loads that meter's sid / meter_no.

    :param point_id: monitoring point id
    :return: dict with ``sid`` and ``meter_no``, or None when the point
        has no meter-change history
    """
    record_sql = "SELECT mid from change_meter_record where pid = %s order by start_time desc limit 1"
    meter_sql = "SELECT sid, meter_no from meter where mid = %s"
    async with MysqlUtil() as conn:
        record = await conn.fetchone(record_sql, args=(point_id,))
        if not record:
            return None
        return await conn.fetchone(meter_sql, args=(record["mid"],))
async def get_meter_by_point_new15(point_id):
"""
根据point获取设备数据
"""
......@@ -122,7 +105,7 @@ async def list_point(cid):
for res in result:
pid = res.get("pid")
points[pid] = res
sql = "SELECT id, `group`, item FROM location WHERE cid=%s and `type` in %s"
async with MysqlUtil() as conn:
result = await conn.fetchall(sql, args=(
......@@ -132,7 +115,7 @@ async def list_point(cid):
group = res.get("group")
item = res.get("item")
groups.setdefault(group, []).append((id, item))
for pid, point_info in points.items():
name = point_info.get("name")
add_to_company = point_info["add_to_company"]
......@@ -146,7 +129,7 @@ async def list_point(cid):
comm_point = {"name": name, "point_id": pid, "locations": locations,
"add_to_company": add_to_company}
list_point.append(comm_point)
async with MysqlUtil() as conn:
sql = "SELECT inlid, `name` FROM inline WHERE cid=%s"
inlines = await conn.fetchall(sql, args=(cid,))
......
......@@ -5,7 +5,7 @@ from pot_libs.utils.exc_util import BusinessException
from unify_api.constants import POINT_LEVEL_MAP, U_THRESHOLD, COSTTL_THRESHOLD, \
LF_THRESHOLD, THDU_THRESHOLD, BL_THRESHOLD, THDI_THRESHOLD
from unify_api.modules.common.procedures.points import points_by_storeys, \
get_meter_by_point_new15
get_meter_by_point
from unify_api.modules.common.service.td_engine_service import \
get_td_engine_data
from unify_api.modules.electric.dao.electric_dao import \
......@@ -899,7 +899,7 @@ async def elec_index_service_new15(cid, point_id, start, end):
async def elec_current_service_new15(point_id):
# 获取mtid
meter_info = await get_meter_by_point_new15(point_id)
meter_info = await get_meter_by_point(point_id)
if not meter_info:
raise BusinessException(
message="没有该监测点的monitor信息,请联系运维人员!")
......
......@@ -9,7 +9,7 @@ import re
from pot_libs.settings import SETTING
from datetime import datetime
from unify_api.modules.common.procedures.points import get_meter_by_point_new15
from unify_api.modules.common.procedures.points import get_meter_by_point
from unify_api.modules.common.service.td_engine_service import \
get_td_engine_data
......@@ -873,7 +873,7 @@ async def post_qual_current(req, body: PageRequest) -> QualCurrentResponse:
raise ParamException(
message="param exception, equals is NULL, no point_id")
# 获取mtid
meter_info = await get_meter_by_point_new15(point_id)
meter_info = await get_meter_by_point(point_id)
if not meter_info:
raise BusinessException(
message="没有该监测点的monitor信息,请联系运维人员!")
......
......@@ -58,7 +58,7 @@ async def other_info(company_id):
GROUP BY
DATE(pevent.event_datetime)
"""
now_time = datetime.now()
# 获取到工厂安装时间create_time
async with MysqlUtil() as conn:
......@@ -67,7 +67,7 @@ async def other_info(company_id):
company = await conn.fetchone(company_sql, (company_id,))
create_time_timestamp = company["create_time"]
create_time = datetime.fromtimestamp(create_time_timestamp)
today_alarm_count = 0
alarm_count = 0
if not alarm_data:
......@@ -78,7 +78,7 @@ async def other_info(company_id):
# 系统安全运行天数: 当前时间 - 工厂安装时间 + 1
safe_run_days = (now_time - create_time).days + 1
return today_alarm_count, safe_run_days, alarm_count
# 5. 构造返回
# 如果每天都有报警, 防止安全运行天数-1天, 所以total_days +2
total_days = (now_time - create_time).days + 2
......@@ -125,20 +125,20 @@ async def other_info_old(company_id):
}
},
}
async with EsUtil() as es:
es_result = await es.search_origin(body=query_body,
index=constants.POINT_1MIN_EVENT)
now_time = datetime.now()
# 获取到工厂安装时间create_time
async with MysqlUtil() as conn:
company_sql = "select create_time from company where cid = %s"
company = await conn.fetchone(company_sql, (company_id,))
create_time_timestamp = company["create_time"]
create_time = datetime.fromtimestamp(create_time_timestamp)
today_alarm_count = 0
alarm_count = 0
date_buckets = es_result.get("aggregations", {}).get("date_alarms",
......@@ -152,7 +152,7 @@ async def other_info_old(company_id):
# 系统安全运行天数: 当前时间 - 工厂安装时间 + 1
safe_run_days = (now_time - create_time).days + 1
return today_alarm_count, safe_run_days, alarm_count
# 5. 构造返回
# 如果每天都有报警, 防止安全运行天数-1天, 所以total_days +2
total_days = (now_time - create_time).days + 2
......@@ -210,124 +210,7 @@ def datetime_to_timestamp(dt):
return ans_time
async def electric_use_info(company_id):
    """
    Electricity-safety index for a company over the last 30 days.
    :param company_id: company id whose alarm events are scored
    :return: ElectricInfo with per-importance alarm counts (over ALL
        alarms), the weighted alarm_score (capped at 15) and the derived
        electric_use_score
    """
    now = datetime.now()
    end_timestamp = datetime_to_timestamp(now)
    start_timestamp = datetime_to_timestamp(
        datetime(now.year, now.month, now.day) - timedelta(30))
    # Event types that count towards the score: every known event type
    # except the temperature / voltage / current threshold events below.
    score_events = [
        i
        for i in EVENT_TYPE_MAP.keys()
        if i
        not in [
            "overTemp",
            "overTempRange1min",
            "overTempRange15min",
            "overTempTrendDaily",
            "overTempTrendQuarterly",
            "over_gap_u",
            "over_rms_u",
            "over_gap_i",
            "over_rms_i",
            "under_rms_u",
            "over_res_cur"
        ]
    ]
    query_body = {
        "query": {
            "bool": {
                "filter": [
                    {"term": {"cid": company_id}},
                    {"range": {"time": {"gte": start_timestamp,
                                        "lte": end_timestamp, }}},
                ],
            }
        },
        "size": 0,
        "aggs": {
            # score_aggs and alarm_aggs are separated because some events
            # do not count towards the alarm score but must still appear
            # in the alarm statistics.
            "score_aggs": {
                "filter": {"terms": {"type.keyword": score_events, }},
                "aggs": {"types": {"terms": {"field": "importance"}}},
            },
            "alarm_aggs": {
                "filter": {"term": {"cid": company_id}},
                "aggs": {"types": {"terms": {"field": "importance"}}},
            },
        },
    }
    log.info("cal_score_safe_electric query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    # Pass 1: count score-relevant alarms per importance level.
    score_buckets = (
        es_result.get("aggregations", {}).get("score_aggs", {}).get("types",
                                                                    {}).get(
            "buckets", [])
    )
    first_alarm_cnt = 0
    second_alarm_cnt = 0
    third_alarm_cnt = 0
    for bucket in score_buckets:
        if bucket["key"] == Importance.First.value:
            first_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Second.value:
            second_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Third.value:
            third_alarm_cnt += bucket["doc_count"]
    # Weighted alarm count normalised by the company's point count
    # (0 when the company has no points, avoiding a division by zero).
    company_point_map = await get_points([company_id])
    point_len = len(company_point_map.get(company_id) or {})
    alarm_score = (
        (
            first_alarm_cnt * 2 + second_alarm_cnt * 1 + third_alarm_cnt * 0.5) / point_len
        if point_len
        else 0
    )
    log.info(f"company_point_map:{company_point_map}, point_len:{point_len}, "
             f"alarm_score:{alarm_score}")
    if alarm_score >= 15:
        alarm_score = 15
    electric_use_score = get_electric_index(alarm_score)
    log.info(
        "point_len={} alarm_score={} electric_use_score={}".format(
            point_len, alarm_score, electric_use_score
        )
    )
    # Pass 2: re-count from alarm_aggs so the returned counts cover ALL
    # alarms, not only the score-relevant subset used for alarm_score.
    alarm_buckets = (
        es_result.get("aggregations", {}).get("alarm_aggs", {}).get("types",
                                                                    {}).get(
            "buckets", [])
    )
    first_alarm_cnt, second_alarm_cnt, third_alarm_cnt = 0, 0, 0
    for bucket in alarm_buckets:
        if bucket["key"] == Importance.First.value:
            first_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Second.value:
            second_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Third.value:
            third_alarm_cnt += bucket["doc_count"]
    return ElectricInfo(
        first_alarm_cnt=first_alarm_cnt,
        second_alarm_cnt=second_alarm_cnt,
        third_alarm_cnt=third_alarm_cnt,
        alarm_score=alarm_score,
        electric_use_score=electric_use_score,
    )
async def electric_use_info_new15(cid):
async def electric_use_info(cid):
"""1.5用电安全指数"""
now = str(datetime.now())
start = str(datetime.now() - timedelta(30))
......@@ -395,27 +278,27 @@ async def normal_rate_of_location(company_id):
)
for location in locations:
location_map[location["id"]] = location
# todo批量hmget
count_info_map = {
"residual_current": {"total": 0, "normal": 0},
"temperature": {"total": 0, "normal": 0},
}
print(f"len(location_map)={len(location_map)}")
location_ids = list(location_map.keys())
adio_currents = []
if location_ids:
adio_currents = await RedisUtils().hmget("adio_current",
*location_ids)
adio_info_map = {}
for index, item_byte in enumerate(adio_currents):
if item_byte:
item = json.loads(item_byte.decode())
adio_info_map[location_ids[index]] = item
for location_id, location_info in location_map.items():
audio_info = adio_info_map.get(location_id)
count_info_map[location_info["type"]]["total"] += 1
......@@ -426,7 +309,7 @@ async def normal_rate_of_location(company_id):
# 超过4小时的值不统计在normal里
log.warn(f"adio_current location_id={location_id} has expire!")
continue
print(
"threshold={} location_info['type'] = {} audio_info['value']={}".format(
location_info["threshold"], location_info["type"],
......@@ -453,7 +336,7 @@ async def normal_rate_of_location(company_id):
)
+ "%"
)
if count_info_map["residual_current"]["total"] == 0:
residual_current_qr = "100%"
else:
......@@ -469,7 +352,7 @@ async def normal_rate_of_location(company_id):
)
+ "%"
)
return temperature_qr, residual_current_qr
......@@ -500,7 +383,7 @@ async def normal_rate_of_location_new15(cid):
if item_byte:
item = json.loads(item_byte.decode())
adio_info_map[location_ids[index]] = item
for location_id, location_info in location_map.items():
audio_info = adio_info_map.get(location_id)
count_info_map[location_info["type"]]["total"] += 1
......@@ -531,7 +414,7 @@ async def normal_rate_of_location_new15(cid):
)
+ "%"
)
if count_info_map["residual_current"]["total"] == 0:
residual_current_qr = "100%"
else:
......@@ -547,7 +430,7 @@ async def normal_rate_of_location_new15(cid):
)
+ "%"
)
return temperature_qr, residual_current_qr
......@@ -562,10 +445,10 @@ async def current_load(company_id):
"and add_to_company = 1"
points = await conn.fetchall(point_sql, args=(company_id,))
point_ids = [p["pid"] for p in points]
if not point_ids:
return ""
async with MysqlUtil() as conn:
meter_sql = (
"SELECT pid, mid FROM change_meter_record WHERE pid in %s ORDER BY pid, start_time"
......@@ -575,15 +458,15 @@ async def current_load(company_id):
# 正序排序,最后这个map存储的是按照start_time是最近的mid
change_meter_map = {m["pid"]: m["mid"] for m in change_meters if
m["mid"] is not None}
newest_mids = list(change_meter_map.values())
meterdata_currents = []
if newest_mids:
meterdata_currents = await RedisUtils().hmget(METERDATA_CURRENT_KEY,
*newest_mids)
now_tt = int(time.time())
if meterdata_currents:
total = 0
for item in meterdata_currents:
......@@ -614,16 +497,16 @@ async def current_load_new15(cid, end_dt=None):
if not end_dt:
end_dt = pendulum.now(tz="Asia/Shanghai")
start_dt = end_dt.subtract(minutes=2)
sql = f"select last_row(mdptime, pttl) from electric_stb " \
f"where TBNAME IN {td_mt_tables} and ts>='{str(start_dt)}' and ts " \
f"<='{str(end_dt)}' group by tbname"
url = f"{SETTING.stb_url}db_electric?tz=Asia/Shanghai"
is_succ, results = await get_td_engine_data(url, sql)
if not is_succ:
return ""
if not results["data"]: # 兼容:mt表(2.0架构)里面拿不到数据再从sid表(1.0架构)里面拿
td_s_tables = tuple(
(f"s{data['sid'].lower()}_e" for data in datas if data["sid"]))
......@@ -633,7 +516,7 @@ async def current_load_new15(cid, end_dt=None):
is_succ, results = await get_td_engine_data(url, sql)
if not is_succ:
return ""
head = parse_td_columns(results)
datas = []
for res in results["data"]:
......@@ -657,7 +540,7 @@ async def power_count_info(company_id):
now = datetime.now()
start_time = (now - timedelta(30)).strftime("%Y-%m-%d %H:%M:%S")
end_time = now.strftime("%Y-%m-%d %H:%M:%S")
max_30d_load, _time = await pttl_max(company_id, start_time, end_time, -1)
cur_load = await current_load(company_id)
return cur_load, max_30d_load
......@@ -668,7 +551,7 @@ async def power_count_info_new15(cid):
now = datetime.now()
start_time = (now - timedelta(30)).strftime("%Y-%m-%d 00:00:00")
end_time = now.strftime("%Y-%m-%d %H:%M:%S")
max_30d_load, _time = await pttl_max_new15(cid, start_time, end_time, -1)
cur_load = await current_load_new15(cid)
return round_2(cur_load), round_2(max_30d_load)
......@@ -704,7 +587,7 @@ async def get_max_aiao_of_filed(company_id, start_time, end_time,
index=constants.LOCATION_15MIN_AIAO)
value_max = es_results.get("aggregations", {}).get("value_max_max", {})
rc_max_hits = value_max.get("hits", {}).get("hits")
max_info, location_map = {}, {}
if rc_max_hits:
max_info = rc_max_hits[0]["_source"]
......@@ -719,7 +602,7 @@ async def get_max_aiao_of_filed(company_id, start_time, end_time,
if max_info
else None
)
return MaxResidualCurrent(
max=round(max_info["value_max"], 2) if max_info else None,
location_name=f"{location_map['group']}_{'漏电流' if location_map['item'] == 'default' else location_map['item']}"
......@@ -804,7 +687,7 @@ async def power_charge_price(company_id):
yestoday_start = datetime(yestoday.year, yestoday.month, yestoday.day, 0,
0, 0)
yestoday_end = yestoday_start + timedelta(1)
es_yestoday_start = datetime.strftime(yestoday_start,
"%Y-%m-%dT%H:%M:%S+08:00")
es_yestoday_end = datetime.strftime(yestoday_end,
......@@ -812,7 +695,7 @@ async def power_charge_price(company_id):
yestoday_price = await get_company_charge_price(company_id,
es_yestoday_start,
es_yestoday_end)
if now.month == 1:
last_month = 12
year = now.year - 1
......@@ -827,7 +710,7 @@ async def power_charge_price(company_id):
last_month_price = await get_company_charge_price(
company_id, es_last_month_start, es_last_month_end
)
return yestoday_price, last_month_price
......@@ -861,90 +744,7 @@ async def power_charge_price_new15(cid):
return round_2(yestoday_price), round_2(last_month_price)
async def power_factor(company_id):
    """
    Home page: real-time power factor and last month's power factor.
    :param company_id: company id
    :return: (cos_ttl, last_month_cos) — real-time total power factor and
        the minimum of last month's per-inline kpi_x; either may be ""
        when no data is available
    """
    async with MysqlUtil() as conn:
        point_sql = (
            "select pid, inlid_belongedto from point where cid= %s and add_to_company=%s"
        )
        points = await conn.fetchall(point_sql, args=(company_id, 1))
        point_ids = [i["pid"] for i in points]
    now = datetime.now()
    # First day of the previous month (roll the year back in January).
    if now.month == 1:
        last_month_dt = datetime(year=now.year - 1, month=12, day=1)
    else:
        last_month_dt = datetime(year=now.year, month=now.month - 1, day=1)
    # The home-page power factor takes the minimum across all inlines.
    async with MysqlUtil() as conn:
        sql = "SELECT inlid, `name` FROM inline WHERE cid=%s"
        inlines = await conn.fetchall(sql, args=(company_id,))
        inline_ids = [inline["inlid"] for inline in inlines]
        power_factor_results = []
        sql = "SELECT inlid, save_charge pf_cost, `kpi_x`, `save_charge` " \
              "FROM algo_power_factor_result WHERE inlid in %s and month=%s"
        if inline_ids:
            power_factor_results = await conn.fetchall(sql, args=(
                inline_ids, last_month_dt))
    pf_kpi_x_list = [
        i["kpi_x"] for i in power_factor_results if
        type(i["kpi_x"]) in [int, float]
    ]
    last_month_cos = min(pf_kpi_x_list) if len(pf_kpi_x_list) else ""
    async with EsUtil() as es:
        # Snap "now" back to the latest 15-minute boundary, then fetch the
        # two most recent quarter-hour records per point.
        dt = pendulum.now(tz="Asia/Shanghai")
        tstamp = dt.int_timestamp // (15 * 60) * (15 * 60)
        dt = pendulum.from_timestamp(tstamp, tz="Asia/Shanghai")
        filters = [
            {"terms": {"pid": point_ids}},
            {"terms": {
                "quarter_time": [str(dt), str(dt - timedelta(minutes=15))]}},
        ]
        query_body = {
            "_source": ["pid", "quarter_time", "pttl_mean", "qttl_mean"],
            "query": {"bool": {"filter": filters}},
            "size": 10000,
            "sort": [{"pid": {"order": "asc"}},
                     {"quarter_time": {"order": "desc"}}],
        }
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_15MIN_INDEX)
        point_infos = es_result["hits"]["hits"]
    point_map = {}
    for i in point_infos:
        item = i["_source"]
        point_map.setdefault(item["pid"], []).append(
            {
                "quarter_time": item["quarter_time"],
                "pttl_mean": item["pttl_mean"],
                "qttl_mean": item["qttl_mean"],
            }
        )
    # Sum each point's newest record (lists are sorted newest-first by
    # the ES sort above).
    total_pttl, total_qttl = 0, 0
    for point_id, records in point_map.items():
        total_pttl += records[0]["pttl_mean"]
        total_qttl += records[0]["qttl_mean"]
    # Real-time power-factor formula: cos = P / sqrt(P^2 + Q^2).
    cos_ttl = ""
    l = sqrt(total_pttl * total_pttl + total_qttl * total_qttl)
    if l:
        cos_ttl = round(total_pttl / l, 2)
    if type(last_month_cos) in [int, float]:
        last_month_cos = round(last_month_cos, 2)
    return cos_ttl, last_month_cos
async def power_factor_new15(cid):
async def cal_power_factor(cid):
"""首页获取实时功率因数, 上月功率因数"""
point_sql = "select pid,inlid from point where cid=%s and add_to_company=1"
async with MysqlUtil() as conn:
......@@ -971,7 +771,7 @@ async def power_factor_new15(cid):
type(i["kpi_x"]) in [int, float]
]
last_month_cos = min(pf_kpi_x_list) if len(pf_kpi_x_list) else ""
dt = pendulum.now(tz="Asia/Shanghai")
tstamp = dt.int_timestamp // (15 * 60) * (15 * 60)
dt = pendulum.from_timestamp(tstamp, tz="Asia/Shanghai")
......@@ -999,7 +799,7 @@ async def power_factor_new15(cid):
l = sqrt(total_pttl * total_pttl + total_qttl * total_qttl)
if l:
cos_ttl = round(total_pttl / l, 2)
if type(last_month_cos) in [int, float]:
last_month_cos = round(last_month_cos, 2)
return cos_ttl, last_month_cos
......@@ -1011,12 +811,12 @@ async def optimization_count_info(company_id: int):
:param company_id:
:return:
"""
async with MysqlUtil() as conn:
sql = "SELECT inlid, `name` FROM inline WHERE cid=%s"
inlines = await conn.fetchall(sql, args=(company_id,))
inline_ids = [inline["inlid"] for inline in inlines]
# 获取公司上月用电
# now = datetime.now()
# es_start_time = (
......@@ -1028,12 +828,12 @@ async def optimization_count_info(company_id: int):
# "%Y-%m-%dT%H:%M:%S+08:00")
# power_use_info = await company_power_use_info(company_id, es_start_time,
# es_end_time)
now = datetime.now()
start_time = (
pendulum.datetime(now.year, now.month, 1)
.subtract(months=1)
.strftime("%Y-%m-%d %H:%M:%S")
.subtract(months=1)
.strftime("%Y-%m-%d %H:%M:%S")
)
end_time = pendulum.datetime(now.year, now.month, 1).strftime(
"%Y-%m-%d %H:%M:%S")
......@@ -1057,13 +857,13 @@ async def optimization_count_info(company_id: int):
}
)
return count_info_map
now = datetime.now()
if now.month == 1:
last_month_dt = datetime(year=now.year - 1, month=12, day=1)
else:
last_month_dt = datetime(year=now.year, month=now.month - 1, day=1)
# 功率因数
async with MysqlUtil() as conn:
sql = "SELECT inlid, `cos`, save_charge pf_cost, kpi_x, save_charge " \
......@@ -1077,7 +877,7 @@ async def optimization_count_info(company_id: int):
2,
)
total_pf_save = 0 if total_pf_save <= 0 else total_pf_save
pf_kpi_x_list = [
i["kpi_x"] for i in power_factor_results if
type(i["kpi_x"]) in [int, float]
......@@ -1093,20 +893,20 @@ async def optimization_count_info(company_id: int):
pf_desc = "空间适中"
else:
pf_desc = "空间较大"
count_info_map["power_factor"] = {
"save_charge": total_pf_save if pf_kpi_x != "" else "",
"kpi_x": pf_kpi_x,
"desc": pf_desc,
}
# 移峰填谷指数
async with MysqlUtil() as conn:
sql = "select `score`, `cost_save` from `algo_plsi_result` " \
"where `inlid` in %s and `month` = %s"
last_month_str = datetime.strftime(last_month_dt, "%Y-%m")
pcvfs = await conn.fetchall(sql, args=(inline_ids, last_month_str))
pcvf_kpi_x_list = [i["score"] for i in pcvfs if
type(i["score"]) in [int, float]]
pcvf_kpi_x = min(pcvf_kpi_x_list) if len(pcvf_kpi_x_list) else ""
......@@ -1114,7 +914,7 @@ async def optimization_count_info(company_id: int):
sum([i["cost_save"] for i in pcvfs if
i["cost_save"] and i["cost_save"] >= 0]), 2
)
if pcvf_kpi_x == "":
pcvf_desc = ""
elif pcvf_kpi_x >= 90:
......@@ -1125,14 +925,14 @@ async def optimization_count_info(company_id: int):
pcvf_desc = "空间适中"
else:
pcvf_desc = "空间较大"
total_pcvf_save = 0 if total_pcvf_save <= 0 else total_pcvf_save
count_info_map["pcvf"] = {
"save_charge": total_pcvf_save if pcvf_kpi_x != "" else "",
"kpi_x": pcvf_kpi_x,
"desc": pcvf_desc,
}
# 经济运行
async with MysqlUtil() as conn:
sql = "select `kpi_x`, `save_charge`, `mean_load_factor` " \
......@@ -1169,13 +969,13 @@ async def optimization_count_info(company_id: int):
economic_desc = "空间适中"
else:
economic_desc = "空间较大"
count_info_map["power_save"] = {
"save_charge": total_economic_save if economic_kpi_x != "" else "",
"kpi_x": economic_kpi_x,
"desc": economic_desc,
}
# 最大需量
async with MysqlUtil() as conn:
sql = (
......@@ -1187,7 +987,7 @@ async def optimization_count_info(company_id: int):
)
last_month_str = datetime.strftime(last_month_dt, "%Y-%m")
md_spaces = await conn.fetchall(sql, args=(inline_ids, last_month_str))
md_space_kpi_x_list = [i["kpi_x"] for i in md_spaces if
type(i["kpi_x"]) in [int, float]]
md_space_kpi_x = max(md_space_kpi_x_list) if len(
......@@ -1216,7 +1016,7 @@ async def optimization_count_info(company_id: int):
"kpi_x": md_space_kpi_x,
"desc": md_space_desc,
}
total_save_cost = 0
for _, item in count_info_map.items():
total_save_cost += (
......@@ -1225,12 +1025,12 @@ async def optimization_count_info(company_id: int):
)
save_percent = total_save_cost / month_charge if month_charge else ""
count_info_map["save_percent"] = save_percent
# 计算最大需量
async with MysqlUtil() as conn:
sql = "select `price_md`,`price_tc` from `price_policy` where `cid`=%s"
price_policy = await conn.fetchone(sql, args=(company_id,))
total_md_space_charge = sum(
[i["inline_md_charge"] for i in md_spaces if i["inline_md_charge"]])
total_md_space_p = (
......@@ -1239,7 +1039,7 @@ async def optimization_count_info(company_id: int):
else ""
)
count_info_map["md_space_p"] = total_md_space_p
# 经济运行最低负载率
mean_load_factors = [
i["mean_load_factor"] for i in economic_operations if
......@@ -1274,12 +1074,12 @@ async def electric_use_info_sdu(cid):
}
}
}
log.info("cal_score_safe_electric query_body={}".format(query_body))
async with EsUtil() as es:
es_result = await es.search_origin(body=query_body,
index=constants.POINT_1MIN_EVENT)
score_buckets = (
es_result.get("aggregations", {}).get("alarm_aggs", {}).get("buckets",
[])
......@@ -1294,7 +1094,7 @@ async def electric_use_info_sdu(cid):
second_alarm_cnt += bucket["doc_count"]
elif bucket["key"] == Importance.Third.value:
third_alarm_cnt += bucket["doc_count"]
company_point_map = await get_points([cid])
point_len = len(company_point_map.get(cid) or {})
alarm_score = (
......@@ -1305,9 +1105,9 @@ async def electric_use_info_sdu(cid):
)
if alarm_score >= 15:
alarm_score = 15
electric_use_score = get_electric_index(alarm_score)
log.info(
"point_len={} alarm_score={} electric_use_score={}".format(
point_len, alarm_score, electric_use_score
......@@ -1359,71 +1159,6 @@ async def electric_use_info_sdu_new15(cid):
async def electric_use_info_points_sdu(start, end, points):
    """Electricity-safety index for SDU, computed from a list of points.

    Fix: the original divided by ``len(points)`` unconditionally and
    raised ZeroDivisionError for an empty point list; an empty list now
    yields a score of 0 (matching the sibling company-level scorer).

    :param start: range start, converted to ES datetime format
    :param end: range end, converted to ES datetime format
    :param points: point ids to score
    :return: ElectricInfo with per-importance alarm counts, the weighted
        alarm_score (capped at 15) and the derived electric_use_score
    """
    start_es = convert_es_str(start)
    end_es = convert_es_str(end)
    query_body = {
        "query": {
            "bool": {
                "filter": [
                    {"terms": {"point_id": points}},
                    {"range": {
                        "datetime": {"gte": start_es, "lte": end_es, }}},
                    {"terms": {"type.keyword": SDU_ALARM_LIST}}
                ],
            }
        },
        "size": 0,
        "aggs": {
            "alarm_aggs": {
                "terms": {"field": "importance"}
            }
        }
    }
    log.info("electric_use_info_points query_body={}".format(query_body))
    async with EsUtil() as es:
        es_result = await es.search_origin(body=query_body,
                                           index=constants.POINT_1MIN_EVENT)
    score_buckets = (
        es_result.get("aggregations", {}).get("alarm_aggs", {}).get("buckets",
                                                                    [])
    )
    first_alarm_cnt = 0
    second_alarm_cnt = 0
    third_alarm_cnt = 0
    for bucket in score_buckets:
        if bucket["key"] == Importance.First.value:
            first_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Second.value:
            second_alarm_cnt += bucket["doc_count"]
        elif bucket["key"] == Importance.Third.value:
            third_alarm_cnt += bucket["doc_count"]
    # Weighted alarm count normalised by the point count; guard against
    # an empty point list (previously ZeroDivisionError).
    weighted_cnt = (first_alarm_cnt * 2 + second_alarm_cnt * 1 +
                    third_alarm_cnt * 0.5)
    alarm_score = weighted_cnt / len(points) if points else 0
    if alarm_score >= 15:
        alarm_score = 15
    electric_use_score = get_electric_index(alarm_score)
    log.info(
        "point_len={} alarm_score={} electric_use_score={}".format(
            len(points), alarm_score, electric_use_score
        )
    )
    return ElectricInfo(
        first_alarm_cnt=first_alarm_cnt,
        second_alarm_cnt=second_alarm_cnt,
        third_alarm_cnt=third_alarm_cnt,
        alarm_score=alarm_score,
        electric_use_score=electric_use_score,
    )
async def electric_use_info_points_sdu_new15(start, end, points):
"""用电安全指数, 识电u, 根据points来计算"""
sql = f"select importance,count(*) as doc_count from point_1min_event " \
f"where pid in %s and event_datetime BETWEEN %s and %s " \
......@@ -1431,7 +1166,7 @@ async def electric_use_info_points_sdu_new15(start, end, points):
async with MysqlUtil() as conn:
results = await conn.fetchall(sql, args=(points, start, end,
SDU_ALARM_LIST))
first_alarm_cnt = 0
second_alarm_cnt = 0
third_alarm_cnt = 0
......@@ -1442,15 +1177,15 @@ async def electric_use_info_points_sdu_new15(start, end, points):
second_alarm_cnt += result["doc_count"]
elif result["importance"] == Importance.Third.value:
third_alarm_cnt += result["doc_count"]
alarm_score = (first_alarm_cnt * 2 + second_alarm_cnt * 1 +
third_alarm_cnt * 0.5) / len(points)
if alarm_score >= 15:
alarm_score = 15
electric_use_score = get_electric_index(alarm_score)
log.info(
"point_len={} alarm_score={} electric_use_score={}".format(
len(points), alarm_score, electric_use_score
......@@ -1471,16 +1206,16 @@ async def optimization_count_info_new(company_id: int):
:param company_id:
:return:
"""
inlines = await get_inline_by_cid(company_id)
inline_ids = [inline["inlid"] for inline in inlines]
# 获取公司上月用电
now = datetime.now()
es_start_time = (
pendulum.datetime(now.year, now.month, 1)
.subtract(months=1)
.strftime("%Y-%m-%dT%H:%M:%S+08:00")
.subtract(months=1)
.strftime("%Y-%m-%dT%H:%M:%S+08:00")
)
es_end_time = pendulum.datetime(now.year, now.month, 1).strftime(
"%Y-%m-%dT%H:%M:%S+08:00")
......@@ -1512,7 +1247,7 @@ async def optimization_count_info_new(company_id: int):
last_month_dt = datetime(year=now.year, month=now.month - 1, day=1)
last_month_str = datetime.strftime(last_month_dt, "%Y-%m")
# 功率因数
power_factor_results = await get_power_factor_kpi(inline_ids,
last_month_dt)
total_pf_save = round(
......@@ -1525,7 +1260,7 @@ async def optimization_count_info_new(company_id: int):
(i["name"], i["kpi_x"]) for i in power_factor_results if
type(i["kpi_x"]) in [int, float]
]
if len(pf_kpi_x_list):
pf_kpi_x_num = [pf_kpi[1] for pf_kpi in pf_kpi_x_list]
pf_kpi_x = min(pf_kpi_x_num)
......@@ -1560,12 +1295,12 @@ async def optimization_count_info_new(company_id: int):
pcvfs = await get_pcvf_kpi(inline_ids, last_month_str)
pcvf_kpi_x_list = [(i["name"], i["score"]) for i in pcvfs if
type(i["score"]) in [int, float]]
if len(pcvf_kpi_x_list):
pcvf_kpi_x_num = [pcvf_kpi[1] for pcvf_kpi in pcvf_kpi_x_list]
pcvf_kpi_x = min(pcvf_kpi_x_num)
pcvf_kpi_x_name = []
if pcvf_kpi_x < 70:
for index, kpi_num in enumerate(pcvf_kpi_x_num):
if kpi_num < 70:
......@@ -1579,7 +1314,7 @@ async def optimization_count_info_new(company_id: int):
f"引入新能源,转移高峰电量至低谷"
else:
pcvf_desc = "平均电价处于较低水平,请继续保持"
else:
pcvf_kpi_x = ""
pcvf_desc = ""
......@@ -1597,7 +1332,7 @@ async def optimization_count_info_new(company_id: int):
sum([i["cost_save"] for i in pcvfs if
i["cost_save"] and i["cost_save"] >= 0]), 2
)
total_pcvf_save = 0 if total_pcvf_save <= 0 else total_pcvf_save
count_info_map["pcvf"] = {
"save_charge": total_pcvf_save if pcvf_kpi_x != "" else "",
......@@ -1605,7 +1340,7 @@ async def optimization_count_info_new(company_id: int):
"desc": pcvf_desc,
"space": pcvf_space
}
# 经济运行
economic_operations = await get_economic_kpi(inline_ids, last_month_str)
economic_kpi_x_list = [
......@@ -1666,14 +1401,14 @@ async def optimization_count_info_new(company_id: int):
"desc": economic_desc,
"space": econ_space
}
# 容量、需量价格
price_policy = await price_policy_by_cid(company_id)
price_md = price_policy["price_md"] if price_policy["price_md"] else 0
price_tc = price_policy["price_tc"] if price_policy["price_tc"] else 0
# 最大需量
md_spaces = await get_md_space(inline_ids, last_month_dt)
md_space_kpi_x_list = [i["kpi_x"] for i in md_spaces if
type(i["kpi_x"]) in [int, float]]
md_space_kpi_x = max(md_space_kpi_x_list) if len(
......@@ -1703,13 +1438,13 @@ async def optimization_count_info_new(company_id: int):
md_space_tc_runtimes[index]["tc_runtime"] * price_tc >= \
price_md * item["inline_md_predict"]:
md_space_name.append(md_space_tc_runtimes[index]["name"])
if len(md_space_name):
md_space_desc = f"若次月负荷无较大变动,建议{'、'.join(md_space_name)}" \
f"选择按最大需量计费"
else:
md_space_desc = "不存在容改需空间"
count_info_map["md_space"] = {
"save_charge": total_md_space_save if md_space_kpi_x != "" else "",
"kpi_x": md_space_kpi_x,
......@@ -1751,7 +1486,7 @@ async def cid_alarm_importance_count(cid, start, end):
point_list = [i["pid"] for i in monitor_point_list]
es_res = await sdu_alarm_importance_dao_new15(start, end, point_list)
es_res_key = {i["key"]: i for i in es_res}
res_list = []
for info in monitor_point_list:
name = info.get("name")
......@@ -1767,7 +1502,7 @@ async def cid_alarm_importance_count(cid, start, end):
tmp_dic["second"] += b["doc_count"]
elif b["key"] == Importance.Third.value:
tmp_dic["third"] += b["doc_count"]
tmp_dic["alarm_count"] = tmp_dic["first"] + tmp_dic["second"] + \
tmp_dic[
"third"]
......
......@@ -2,8 +2,6 @@ import ast
from unify_api.constants import SDU_ONE_TWO_GRADE_ALARM
from pot_libs.mysql_util.mysql_util import MysqlUtil
from unify_api.constants import CO2_N
from unify_api.modules.alarm_manager.dao.list_static_dao import \
sdu_alarm_aggs_date, sdu_alarm_aggs_date_importance
from unify_api.modules.carbon_neutral.service.carbon_reduce_service import \
carbon_emission_index_service
from unify_api.modules.common.dao.common_dao import monitor_by_cid, tsp_by_cid, \
......@@ -21,10 +19,10 @@ from unify_api.modules.home_page.dao.count_info_dao import \
alarm_aggs_point_location
from unify_api.modules.home_page.procedures.count_info_pds import other_info, \
electric_use_info, cid_alarm_importance_count, \
alarm_importance_count_total, power_factor, current_load, \
alarm_importance_count_total, current_load, \
get_company_charge_price, health_status_res, carbon_status_res_web, \
optimization_count_info, economic_index_desc, electric_use_info_new15, \
power_factor_new15, current_load_new15
optimization_count_info, economic_index_desc, \
cal_power_factor, current_load_new15
from unify_api.modules.home_page.procedures.count_info_proxy_pds import \
alarm_percentage_count, alarm_safe_power
from unify_api.modules.tsp_water.dao.drop_dust_dao import \
......@@ -170,7 +168,8 @@ async def info_yang_chen_service(cid):
if pm25_max_list and max(pm25_max_list) < 35:
air_quality += 1
# 3. 安全运行天数, 从接入平台算起,未出现一级报警则加一天
alarm_es = await sdu_alarm_aggs_date_importance(cid)
# alarm_es = await sdu_alarm_aggs_date_importance(cid)
alarm_es = []
safe_operation_days = 0
for alarm in alarm_es:
in_bucket = alarm["importance"]["buckets"]
......@@ -269,7 +268,7 @@ async def alarm_price_costtl_service(cid):
# 1. 今日报警
imp_dic = await alarm_importance_count_total(cid, today_start, today_end)
# 2. 实时功率因数, 上月功率因数
cos_ttl, last_month_cos = await power_factor_new15(cid)
cos_ttl, last_month_cos = await cal_power_factor(cid)
# 3. 实时负荷
cur_load = await current_load_new15(cid)
# 4. 平均电价
......@@ -400,7 +399,7 @@ async def all_index_info_service(cid):
health_index = round(health_index)
health_status = health_status_res(health_index, "web")
# 2. 安全指数
elec_info = await electric_use_info_new15(cid)
elec_info = await electric_use_info(cid)
safety_index = elec_info.electric_use_score
safety_status = safety_ratio_res(safety_index, "web")
# 3. 碳排指数
......
......@@ -24,10 +24,10 @@ from unify_api.modules.home_page.procedures.count_info_pds import (
normal_rate_of_location, normal_rate_of_location_new15,
other_info, other_info_new15,
power_count_info, power_count_info_new15,
electric_use_info, electric_use_info_new15,
electric_use_info,
datetime_to_timestamp,
power_charge_price, power_charge_price_new15,
power_factor, power_factor_new15,
cal_power_factor,
optimization_count_info, optimization_count_info_new
)
from unify_api.modules.home_page.service.count_info_service import \
......@@ -79,7 +79,7 @@ async def post_count_info(request, body: CountInfoReq) -> CountInfoResp:
# 用电安全指数, 报警分, 近30天报警1,2,3级数目
# electric_info = await electric_use_info(company_id)
electric_info = await electric_use_info_new15(company_id)
electric_info = await electric_use_info(company_id)
# 昨日平均电价, 上月平均电价
# yestoday_price, last_month_price = await power_charge_price(
......@@ -89,7 +89,7 @@ async def post_count_info(request, body: CountInfoReq) -> CountInfoResp:
# 实时功率因数, 上月功率因数
# cos_ttl, last_month_cos = await power_factor(company_id)
cos_ttl, last_month_cos = await power_factor_new15(company_id)
cos_ttl, last_month_cos = await cal_power_factor(company_id)
# 其实异常捕获这个东西最好是在框架内部做一次就够了
except (ElasticsearchException, MySQLError, RedisError) as e:
......
......@@ -33,8 +33,7 @@ from unify_api.modules.home_page.components.count_info_proxy_cps import (
AlarmRankingReq,
AipResp, CisResp, CisReq,
)
from unify_api.modules.home_page.procedures.count_info_pds import other_info, \
electric_use_info
from unify_api.modules.home_page.procedures.count_info_pds import other_info
from unify_api.modules.home_page.procedures.count_info_proxy_pds import (
security_level_count,
alarm_percentage_count,
......
import json
from datetime import datetime, timedelta
from pot_libs.mysql_util.mysql_util import MysqlUtil
from pot_libs.utils.exc_util import BusinessException
from unify_api import constants
from unify_api.modules.alarm_manager.dao.list_static_dao import \
sdu_alarm_behavior_dao, sdu_alarm_behavior_dao_new15
from unify_api.modules.common.procedures.points import get_meter_by_point, \
get_meter_by_point_new15
sdu_alarm_behavior_dao
from unify_api.modules.common.procedures.points import \
get_meter_by_point
from unify_api.modules.home_page.procedures.count_info_pds import \
electric_use_info_points_sdu, electric_use_info_points_sdu_new15
electric_use_info_points_sdu
from unify_api.modules.shidianu.components.algorithm_cps import WcResp, AbcResp
from unify_api.modules.shidianu.dao.analysis_result_dao import \
query_sdu_power_wave, query_sdu_recog_record
......@@ -19,67 +18,11 @@ from unify_api.utils.time_format import last30_day_range, \
get_start_end_by_tz_time, day_slots, get_start_end_by_tz_time_new
async def wave_curve_service(point_id, req_date, product):
    """Build the daily power-wave curve for one monitoring point, annotated
    with per-minute appliance-recognition results.

    :param point_id: monitoring point id
    :param req_date: requested day, formatted "YYYY-MM-DD"
    :param product: product flag from the caller (not used in this body —
        presumably kept for interface parity; verify against callers)
    :return: ``WcResp`` with 1440 minute labels, the power series, and a
        mapping of "HH:MM" -> the single highest-priority recognized action
    :raises BusinessException: when the point has no meter information
    """
    # 1. Fetch the power curve data and the per-minute time slots
    meter_info = await get_meter_by_point(point_id)
    if not meter_info:
        raise BusinessException(message="没有该监测点的meter信息,请联系运维人员!")
    sid, meter_no = meter_info["sid"], meter_info["meter_no"]
    dt = datetime.strptime(req_date + " 00:00:00", "%Y-%m-%d %H:%M:%S")
    # 1440 "HH:MM" labels, one per minute of the requested day.
    time_slot = [
        datetime.strftime(dt + timedelta(minutes=i),
                          "%Y-%m-%d %H:%M:%S").split(" ")[1][:5]
        for i in range(1440)
    ]
    p_list = await get_p_list(sid, meter_no, req_date, time_slot)
    # 2. Fetch the appliance-recognition records for the same day
    start, end = get_start_end_by_tz_time(req_date)
    device_data = await query_sdu_recog_record(point_id, start, end)
    electric_actions = {}
    if device_data:
        for i in device_data:
            # Reduce the recognition timestamp to its "HH:MM" component.
            recog_dt = str(i["recog_dt"])
            recog_dt = recog_dt.split(" ")[1][:5]
            act_info = json.loads(i["act_info"])
            # Assemble entries shaped like: "04:02" -> illegal appliance,
            # suspected e-bike battery.
            # NOTE(review): tmp_dic is never used below — candidate for removal.
            tmp_dic = {}
            # Assumes act_info holds exactly one key/value pair; a different
            # shape raises ValueError here — TODO confirm upstream guarantee.
            (key, value), = act_info.items()
            type_str = constants.SDU_EVENT_TYPE_MAP.get(key)
            # Illegal / high-power / normal appliance: keep only the single
            # highest-priority entry per minute.
            if recog_dt not in electric_actions:
                electric_actions[recog_dt] = [
                    {"type": type_str, "value": value, "type_str": key}
                ]
            else:
                # E-bike battery has top priority; if this minute already
                # holds one, later records for the minute are ignored.
                if electric_actions[recog_dt][0]["value"] == "电动车电池":
                    continue
                # A normal appliance needs no handling; an illegal appliance
                # replaces whatever entry the minute currently holds.
                elif key == "illegal_ele_app":
                    electric_actions[recog_dt] = [
                        {"type": type_str, "value": value, "type_str": key}
                    ]
                # A high-power appliance replaces the entry only when the
                # existing one is a normal appliance.
                elif key == "high_power_app" and electric_actions[recog_dt][0][
                    "type_str"] == "normal_app":
                    electric_actions[recog_dt] = [
                        {"type": type_str, "value": value, "type_str": key}
                    ]
    return WcResp(
        time_slot=time_slot,
        p_slot=p_list,
        electric_actions=electric_actions
    )
async def wave_curve_service_new15(point_id, req_date, product):
# 1,获取slots
time_slot = day_slots()
# 2. 获取sid
meter_info = await get_meter_by_point_new15(point_id)
meter_info = await get_meter_by_point(point_id)
if not meter_info:
raise BusinessException(message="没有该监测点的monitor信息,请联系运维人员!")
mtid, meter_no = meter_info["mtid"], meter_info["meter_no"]
......@@ -127,6 +70,7 @@ async def wave_curve_service_new15(point_id, req_date, product):
electric_actions=electric_actions
)
async def alarm_behavior_curve_service(point_id, req_date, product):
# 1. 获取功率波动, 如果没有查询到功率波动,返回None
wave_data = await query_sdu_power_wave(point_id, req_date + " 00:00:00")
......@@ -136,9 +80,7 @@ async def alarm_behavior_curve_service(point_id, req_date, product):
power_swing = None
# 2. 安全评价
start, end = last30_day_range()
# alarm_res = await electric_use_info_points_sdu(start, end, [point_id])
alarm_res = await electric_use_info_points_sdu_new15(start, end,
[point_id])
alarm_res = await electric_use_info_points_sdu(start, end, [point_id])
safety_eval = {"first_alarm_cnt": alarm_res.first_alarm_cnt,
"second_alarm_cnt": alarm_res.second_alarm_cnt,
"third_alarm_cnt": alarm_res.third_alarm_cnt,
......@@ -147,7 +89,7 @@ async def alarm_behavior_curve_service(point_id, req_date, product):
}
# 3. 行为统计
# behavior_res = await sdu_alarm_behavior_dao(start, end, [point_id])
behavior_res = await sdu_alarm_behavior_dao_new15(start, end, [point_id])
behavior_res = await sdu_alarm_behavior_dao(start, end, [point_id])
behavior_illegal_app = []
if behavior_res:
for i in behavior_res:
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment