Commit fbacfff2 authored by lcn

Bug fix

parent 99c71089
......@@ -30,7 +30,8 @@ async def load_cmpy_power(cids):
async def inline_power_use_info(inline_ids, month_str):
sql = "SELECT inlid, sum(kwh) kwh, sum(charge) charge, sum(p) p FROM " \
"`inline_1day_power` where inlid in %s and " \
f"DATE_FORMAT(create_time, '%%Y-%%m')='{month_str}' GROUP BY inlid"
f"DATE_FORMAT(create_time, '%%Y-%%m-%%d')='{month_str}' GROUP BY " \
f"inlid"
async with MysqlUtil() as conn:
datas = await conn.fetchall(sql, args=(inline_ids,))
inline_power_info_map = {
......
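The changed hunk widens the `DATE_FORMAT` pattern from `'%%Y-%%m'` to `'%%Y-%%m-%%d'`, which presumes the caller now passes a full `YYYY-MM-DD` value in `month_str`. The doubled `%%` is the usual escaping so the driver's own placeholder substitution (for `inlid in %s`) does not swallow the date format specifiers; a minimal sketch of how the statement renders, assuming a pyformat-style driver such as aiomysql behind `MysqlUtil` (an assumption, the wrapper is not shown here):

```python
# Sketch only: shows why the date pattern uses '%%' while the IN clause keeps '%s'.
# Assumes MysqlUtil delegates to a pyformat-style driver (e.g. aiomysql), where a
# literal '%' in the statement must be written as '%%'.
month_str = "2023-05-01"  # hypothetical value; the parameter name predates the day granularity
sql = (
    "SELECT inlid, sum(kwh) kwh, sum(charge) charge, sum(p) p FROM "
    "`inline_1day_power` where inlid in %s and "
    f"DATE_FORMAT(create_time, '%%Y-%%m-%%d')='{month_str}' GROUP BY inlid"
)
# After the driver substitutes args=((1, 2, 3),), the server receives roughly:
#   ... where inlid in (1, 2, 3)
#       and DATE_FORMAT(create_time, '%Y-%m-%d') = '2023-05-01' GROUP BY inlid
print(sql)
```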
......@@ -75,6 +75,8 @@ async def get_elec_mtid_sid_by_cid(cid):
async def load_add_to_compy_ids(cid):
if not isinstance(cid, list) or not isinstance(cid, tuple):
cid = [cid]
db = SETTING.mysql_db
sql = f"SELECT monitor.mtid, monitor.sid FROM {db}.monitor " \
f"INNER JOIN {db}.point ON point.mtid=monitor.mtid " \
......
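One note on the guard in this hunk: `not isinstance(cid, list) or not isinstance(cid, tuple)` is true for every value, since nothing is a list and a tuple at the same time, so an incoming list or tuple also gets wrapped into a one-element list. The conventional normalization uses a single `isinstance` check against a tuple of types; a minimal sketch (the helper name is illustrative, not from the source):

```python
def ensure_id_list(cid):
    """Normalize a scalar id or a list/tuple of ids to a plain list (sketch).

    The diff's `or` form wraps lists and tuples a second time; checking both
    types in one isinstance call keeps existing sequences untouched.
    """
    if not isinstance(cid, (list, tuple)):
        cid = [cid]
    return list(cid)


assert ensure_id_list(7) == [7]
assert ensure_id_list([7, 8]) == [7, 8]
assert ensure_id_list((7, 8)) == [7, 8]
```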
from pot_libs.es_util.es_utils import EsUtil
from pot_libs.settings import SETTING
from unify_api.modules.anshiu.dao.fine_monitor_dao import get_mtid_by_pid_dao, \
get_sid_by_mtid_dao, get_mtids_by_pids_dao
from unify_api.utils.time_format import convert_es_str
from unify_api.utils.common_utils import make_tdengine_data_as_list
from unify_api.utils.taos_new import get_td_engine_data
from unify_api.utils.time_format import convert_es_str, CST
from unify_api.modules.zhiwei_u.config import SCOPE_DATABASE
from pot_libs.mysql_util.mysql_util import MysqlUtil
async def query_point_1min_index(p_database, date_start, date_end,
point_id):
async def query_point_1min_index(date_start, date_end,
mtid, fields=None):
"""point点某一天1440个点数据"""
start_es = convert_es_str(date_start)
end_es = convert_es_str(date_end)
query_body = {
"size": 10000,
"query": {
"bool": {
"must": [
{
"term": {
"point_id": point_id
}
},
{
"range": {
"datetime": {
"gte": start_es,
"lte": end_es
}
}
}
]
}
},
"sort": [{"datetime": {"order": "asc"}}]
}
async with EsUtil() as es:
es_re = await es.search(body=query_body, index=p_database)
return es_re
async def query_location_1min_index(l_database, date_start, date_end,
location_id):
if fields:
fields.insert(0, 'ts')
fields_str = ",".join(fields)
else:
fields_str = "*"
db = "db_electric"
url = f"{SETTING.stb_url}{db}"
table_name = "mt%s_ele" % mtid
sql = f"select {fields_str} from {table_name} where ts >= '{date_start}'" \
f" and ts <= '{date_end}' order by ts asc"
is_succ, tdengine_data = await get_td_engine_data(url, sql)
if not is_succ:
return []
results = make_tdengine_data_as_list(tdengine_data)
return results
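The rewritten helper reads the one-minute electrical series from TDengine instead of Elasticsearch: requested fields are projected explicitly with `ts` forced to the front, the per-meter table follows the `mt{mtid}_ele` naming used in this diff, and the REST payload is flattened by `make_tdengine_data_as_list`. A minimal usage sketch matching the call sites below (the meter id and field names are illustrative):

```python
import asyncio

async def demo_point_query():
    # Hypothetical mtid and field list; mirrors how the service code calls the helper.
    rows = await query_point_1min_index(
        "2023-05-01 00:00:00", "2023-05-01 23:59:59",
        mtid=12345, fields=["ua", "ia", "pttl"],
    )
    # Each row is expected to be a dict carrying 'ts' plus the requested fields.
    for row in rows[:3]:
        print(row["ts"], row.get("ua"), row.get("pttl"))

# asyncio.run(demo_point_query())  # requires a reachable TDengine behind SETTING.stb_url
```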
async def query_location_1min_index(date_start, date_end, mtid,
fields=None):
"""location点某一天1440*n个点数据"""
start_es = convert_es_str(date_start)
end_es = convert_es_str(date_end)
query_body = {
"size": 10000,
"query": {
"bool": {
"must": [
{
"terms": {
"location_id": location_id
}
},
{
"range": {
"datetime": {
"gte": start_es,
"lte": end_es
}
}
}
]
}
},
"sort": [{"datetime": {"order": "asc"}}]
}
async with EsUtil() as es:
es_re = await es.search(body=query_body, index=l_database)
return es_re
if fields:
fields.insert(0, 'ts')
fields_str = ",".join(fields)
else:
fields_str = "*"
db = "db_adio"
url = f"{SETTING.stb_url}{db}"
table_name = "mt%s_adi" % mtid
sql = f"select {fields_str} from {table_name} where ts >= '{date_start}'" \
f" and ts <= '{date_end}' order by ts asc"
is_succ, tdengine_data = await get_td_engine_data(url, sql)
if not is_succ:
return []
results = make_tdengine_data_as_list(tdengine_data)
return results
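Both helpers lean on `get_td_engine_data` and `make_tdengine_data_as_list`, whose bodies are not part of this diff. As a rough mental model only (an assumption about the TDengine REST response shape, not the project's actual implementation in `unify_api.utils.common_utils`), the flattening step turns `column_meta` plus row arrays into the list of dicts the call sites iterate over:

```python
def make_tdengine_data_as_list_sketch(tdengine_data):
    """Assumed shape: {'column_meta': [[name, type, length], ...], 'data': [[...], ...]}.

    Sketch of the flattening that query_point_1min_index / query_location_1min_index
    rely on; the real helper may differ.
    """
    columns = [meta[0] for meta in tdengine_data.get("column_meta", [])]
    return [dict(zip(columns, row)) for row in tdengine_data.get("data", [])]


sample = {
    "column_meta": [["ts", "TIMESTAMP", 8], ["temp1", "FLOAT", 4]],
    "data": [["2023-05-01 00:00:00", 41.2], ["2023-05-01 00:01:00", 41.5]],
}
print(make_tdengine_data_as_list_sketch(sample))
# [{'ts': '2023-05-01 00:00:00', 'temp1': 41.2}, {'ts': '2023-05-01 00:01:00', 'temp1': 41.5}]
```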
async def get_search_scope(cid, pid, start, end):
......
......@@ -58,25 +58,21 @@ async def sid_to_params_service(sid, pid):
async def get_temp_data(pid, start, end, slots):
mt_data = await get_mtid_by_pid(pid)
location_data = await get_locationid_by_mtid(mt_data["mtid"],
"temperature")
all_datas = {
"Atemp": ['' for _ in range(1440)],
"Btemp": ['' for _ in range(1440)],
"Ctemp": ['' for _ in range(1440)],
"Ntemp": ['' for _ in range(1440)]
}
location_id = [i.get("id") for i in location_data]
location_temp = {location["id"]: location["item"][0] + location["type"][:4]
for location in location_data}
l_database = "poweriot_location_1min_aiao_" + start[:4] + "_" + \
str(int(start[5:7]))
datas = await query_location_1min_index(l_database, start, end,
location_id)
datas = await query_location_1min_index(start, end, mt_data["mtid"],
fields=["temp1", "temp2", "temp3",
"temp4"])
for data in datas:
key = location_temp[data["location_id"]]
index = slots.index(data["datetime"][11:16])
all_datas[key][index] = data["value"]
index = slots.index(data["ts"])
all_datas["Atemp"][index] = data["temp1"]
all_datas["Btemp"][index] = data["temp2"]
all_datas["Ctemp"][index] = data["temp3"]
all_datas["Ntemp"][index] = data["temp4"]
return all_datas
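After the switch, each TDengine row is slotted by `slots.index(data["ts"])`, so the `ts` values must already match the slot labels exactly, whereas the Elasticsearch path derived the label by slicing `datetime[11:16]` down to `HH:MM`. A minimal sketch of how the 1440-slot fill lines up, assuming `HH:MM` labels (an assumption; the real format depends on how `slots` is built upstream and on how `ts` is post-processed):

```python
# Build 1440 "HH:MM" labels for one day and fill a single channel from rows whose
# 'ts' already matches that label format (assumed; the old ES code produced it via
# data["datetime"][11:16]).
slots = [f"{h:02d}:{m:02d}" for h in range(24) for m in range(60)]
a_temp = ['' for _ in range(1440)]

rows = [{"ts": "00:00", "temp1": 41.2}, {"ts": "00:01", "temp1": 41.5}]
for row in rows:
    a_temp[slots.index(row["ts"])] = row["temp1"]

print(a_temp[:3])  # [41.2, 41.5, '']
```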
......@@ -101,13 +97,11 @@ async def data_operation_search_service(mtid, point, params, start, end,
threhold = await get_residual_current_threhold(resi_data[0]["id"])
residual_current_threhold = threhold["threshold"] if threhold else 30
resi_mid_data = {"resi": ['' for _ in range(1440)]}
l_database = "poweriot_location_1min_aiao_" + start[:4] + "_" + \
str(int(start[5:7]))
resi_datas = await query_location_1min_index(l_database, start, end,
[resi_data[0]["id"]])
resi_datas = await query_location_1min_index(start, end, mt_data[
"mtid"], fields=["residual_current"])
for data in resi_datas:
index = slots.index(data["datetime"][11:16])
resi_mid_data["resi"][index] = data["value"]
index = slots.index(data["ts"])
resi_mid_data["resi"][index] = data["residual_current"]
resi = RESI(threhold=residual_current_threhold,
value_slots=resi_mid_data.get("resi"))
ctnum, _ = await get_wiring_type(point)
......@@ -125,10 +119,8 @@ async def data_operation_search_service(mtid, point, params, start, end,
words.append(p)
for i in words:
all_datas[i] = []
p_database = "poweriot_point_1min_index_" + start[:4] + "_" + \
str(int(start[5:7]))
origin_datas = await query_point_1min_index(p_database, start, end, point)
new_data = {d["datetime"][11:16]: d for d in origin_datas}
origin_datas = await query_point_1min_index(start, end, mtid, words)
new_data = {d["ts"]: d for d in origin_datas}
for slot in slots:
if slot in new_data.keys():
for i in all_datas.keys():
......@@ -363,16 +355,21 @@ def get_download_data(sdu_u, params):
if "thdu" in params:
key.remove("thdu")
key += [
"thdua", "hr3ua", "hr5ua", "hr7ua", "hr9ua", "hr11ua", "hr13ua",
"thdub", "hr3ub", "hr5ub", "hr7ub", "hr9ub", "hr11ub", "hr13ub",
"thdua", "hr3ua", "hr5ua", "hr7ua", "hr9ua", "hr11ua",
"hr13ua",
"thdub", "hr3ub", "hr5ub", "hr7ub", "hr9ub", "hr11ub",
"hr13ub",
"thduc", "hr3uc", "hr5uc", "hr7uc", "hr9uc", "hr11uc", "hr13uc"
]
if "thdi" in params:
key.remove("thdi")
key += [
"thdia", "hr3ia", "hr5ia", "hr7ia", "hr9ia", "hr11ia", "hr13ia",
"thdic", "hr3ic", "hr5ic", "hr7ic", "hr9ic", "hr11ic", "hr13ic",
"thdib", "hr3ib", "hr5ib", "hr7ib", "hr9ib", "hr11ib", "hr13ib",
"thdia", "hr3ia", "hr5ia", "hr7ia", "hr9ia", "hr11ia",
"hr13ia",
"thdic", "hr3ic", "hr5ic", "hr7ic", "hr9ic", "hr11ic",
"hr13ic",
"thdib", "hr3ib", "hr5ib", "hr7ib", "hr9ib", "hr11ib",
"hr13ib",
]
if "fdie" in params:
key.remove("fdie")
......
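The widened lists in `get_download_data` re-wrap the same harmonic columns (total harmonic distortion plus the odd harmonics 3 through 13 for each phase); only the line breaks change in this hunk. The naming scheme can also be generated, which documents it compactly; a sketch, not part of the commit:

```python
# Equivalent construction of the harmonic column names listed above:
# thd<kind><phase> followed by hr{3,5,...,13}<kind><phase> per phase,
# with kind 'u' for voltage and 'i' for current.
def harmonic_keys(kind, phases):
    keys = []
    for ph in phases:
        keys.append(f"thd{kind}{ph}")
        keys.extend(f"hr{n}{kind}{ph}" for n in range(3, 15, 2))
    return keys

print(harmonic_keys("u", "abc"))  # thdua, hr3ua, ..., hr13ua, thdub, ...
print(harmonic_keys("i", "acb"))  # matches the a, c, b ordering in the diff
```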