chaonan / unify_api2 · Commit af2f7607

Commit af2f7607, authored Jul 03, 2023 by lcn
Bug fix (bug修复)
parent 21b257c1

Showing 4 changed files with 58 additions and 140 deletions (+58 / -140):

    unify_api/modules/scope_analyse/dao/scope_record_dao.py           +12  -104
    unify_api/modules/scope_analyse/dao/scope_record_mysql_dao.py      +1    -1
    unify_api/modules/scope_analyse/service/scope_analyse_service.py  +40   -31
    unify_api/modules/scope_analyse/views/scope_analyse.py             +5    -4
unify_api/modules/scope_analyse/dao/scope_record_dao.py

@@ -16,84 +16,11 @@ async def scope_by_sql(mid_sql):
     return data
 
 
-async def scope_by_es(cid, point_id, page_num, page_size, start, end):
-    query_body = {
-        "from": (page_num - 1) * page_size,
-        "size": page_size,
-        "query": {
-            "bool": {
-                "must": [
-                    {"term": {"cid": cid}},
-                    {"terms": {"point_id": point_id}}
-                ]
-            }
-        },
-        "sort": [
-            {"datetime": {"order": "desc"}}
-        ]
-    }
-    if start and end:
-        start_es = convert_es_str(start)
-        end_es = convert_es_str(end)
-        query_body["query"]["bool"]["must"].append(
-            {"range": {"datetime": {"gte": start_es, "lte": end_es}}}
-        )
-    async with EsUtil() as es:
-        es_re = await es.search_origin(body=query_body,
-                                       index="poweriot_point_1min_scope")
-    if es_re["hits"]:
-        total = es_re["hits"]["total"]
-        data = [get_source(hit) for hit in es_re['hits']['hits']]
-    else:
-        data, total = [], 0
-    return data, total
-
-
-async def detail_data_by_es(scope_id):
-    query_body = {
-        "query": {
-            "bool": {
-                "must": [
-                    {"term": {"_id": scope_id}}
-                ]
-            }
-        }
-    }
-    try:
-        async with EsUtil() as es:
-            es_results = await es.search_origin(
-                body=query_body, index="poweriot_point_1min_scope")
-    except:
-        log.error("es query error")
-        return ScopeDetailsResponse().db_error()
-    result = None
-    if es_results["hits"]["total"]:
-        result = es_results["hits"]["hits"][0]["_source"]
-    return result
+async def detail_data_by_es(pid, create_time):
+    sql = f"select * from point_1min_scope where pid=%s and create_time=%s"
+    async with MysqlUtil() as conn:
+        data = await conn.fetchone(sql, args=(pid, create_time))
+    return data
 
 
 def get_source(hit):

@@ -102,29 +29,10 @@ def get_source(hit):
     return result
 
 
-async def event_data_by_es(scope_id):
-    query_body = {
-        "query": {
-            "bool": {
-                "must": [
-                    {"term": {"doc_id.keyword": scope_id}}
-                ]
-            }
-        }
-    }
-    try:
-        async with EsUtil() as es:
-            es_results = await es.search_origin(
-                body=query_body, index="poweriot_point_1min_event")
-    except:
-        log.error("es query error")
-        return ScopeDetailsResponse().db_error()
-    result = None
-    if es_results["hits"]["total"]:
-        result = es_results["hits"]["hits"][0]["_source"]
-    return result
+async def event_data_by_es(mtid, event_datetime, event_type):
+    sql = f"select * from point_1min_event where mtid=%s and " \
+          f"event_datetime=%s and event_type=%s"
+    async with MysqlUtil() as conn:
+        data = await conn.fetchone(sql, args=(mtid, event_datetime, event_type))
+    return data
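For orientation, here is a minimal sketch (not part of the commit) of how the rewritten DAO layer gets used: the scope row is now looked up in MySQL by (pid, create_time) and its matching event row by (mtid, datetime, fault type). The helper name and the idea of chaining the two calls are assumptions; the field names come from the service code in this commit.

    # Hypothetical helper, not part of the commit: chains the two MySQL-backed
    # DAO calls the way scope_analyse_service now uses them.
    from unify_api.modules.scope_analyse.dao.scope_record_dao import (
        detail_data_by_es, event_data_by_es)


    async def load_scope_and_event(pid, create_time_str):
        # point_1min_scope row keyed by pid + create_time
        scope_row = await detail_data_by_es(pid, create_time_str)
        if not scope_row:
            return None, None
        # point_1min_event row keyed by mtid + event_datetime + event_type
        event_row = await event_data_by_es(scope_row["mtid"], create_time_str,
                                           scope_row["fault_type"])
        return scope_row, event_row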
unify_api/modules/scope_analyse/dao/scope_record_mysql_dao.py

@@ -33,7 +33,7 @@ async def get_point_name_dao(pid):
 async def get_trigger_params_dao(type_id):
-    sql = "select name from event_type WHERE id=%s"
+    sql = "select name from event_type WHERE e_type=%s"
     async with MysqlUtil() as conn:
         data = await conn.fetchone(sql, args=(type_id,))
     return data
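The only change here is the lookup column: the query now filters on the event-type code (e_type) instead of the numeric id, which matches the fault_type strings such as "over_res_cur" that the service passes in. A hedged illustration, with a hypothetical wrapper around the real DAO function:

    # Hypothetical wrapper, assuming event_type.e_type stores codes such as
    # "over_res_cur" that match the fault_type field on scope records.
    from unify_api.modules.scope_analyse.dao.scope_record_mysql_dao import \
        get_trigger_params_dao


    async def fault_type_name(fault_type_code):
        row = await get_trigger_params_dao(fault_type_code)  # filters on e_type now
        return row.get("name") if row else None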
unify_api/modules/scope_analyse/service/scope_analyse_service.py

 import json
 import logging
+from pot_libs.qingstor_util.qs_client import QsClient
 from unify_api.utils import time_format
 from unify_api.modules.zhiwei_u.dao.warning_operations_dao import \
     select_point_dao

@@ -8,9 +9,8 @@ from unify_api.modules.scope_analyse.dao.scope_record_mysql_dao import \
     get_mtid_dao, get_location_id_dao, get_threhold_dao, get_point_name_dao, \
     get_trigger_params_dao
 from unify_api.modules.zhiwei_u.fault_foreast.actionFile import actionFile
 from unify_api.modules.zhiwei_u.fault_foreast.test import leakage_reg
 from unify_api.modules.scope_analyse.dao.scope_record_dao import \
-    scope_by_es, detail_data_by_es, event_data_by_es, scope_by_sql
+    detail_data_by_es, event_data_by_es, scope_by_sql
 from unify_api.modules.scope_analyse.components.scope_analyse_cps import \
     ScopeRecordResp, ScopeRecord, ScopeAnalyseResp, LeakageCurrentResp
 from pot_libs.logger import log
@@ -19,7 +19,6 @@ from unify_api.utils.response_code import RET
 async def scope_record_service(cid, point_id, page_num, page_size, start, end):
-    # datas, total = await scope_by_es(cid, point_id, page_num, page_size, start, end)
     li = [f"cid={cid}"]
     if point_id:
         if len(point_id) == 1:

@@ -36,8 +35,8 @@ async def scope_record_service(cid, point_id, page_num, page_size, start, end):
     # 动态漏电流阈值
     rows = []
     if datas:
-        start = (page_num - 1) * page_size
-        datas = datas[start:start + page_num]
+        start = (page_num - 1) * page_size
+        datas = datas[start:start + page_num]
         for data in datas:
             # 漏电流
             if data["fault_type"] in ("over_res_cur", "overResidualCurrent"):
@@ -65,42 +64,51 @@ async def scope_record_service(cid, point_id, page_num, page_size, start, end):
                 record_type_name, probability = "不存在故障", ""
             fault_type = await get_trigger_params_dao(data["fault_type"])
             reason = fault_type.get("name")
-            dt = time_format.convert_to_dt(data["datetime"])
+            dt = data["create_time"]
             check_dt = time_format.convert_dt_to_timestr(dt)
-            sr = ScopeRecord(trigger_time=check_dt, point_id=data["point_id"],
-                             point_name=point_dict.get(data["point_id"]),
+            check_timestamp = int(time_format.convert_dt_to_timestamp(dt))
+            sr = ScopeRecord(trigger_time=check_dt, point_id=data["pid"],
+                             point_name=point_dict.get(data["pid"]),
                              record_type=data["fault_type"],
                              record_type_name=record_type_name,
                              probability=probability, reason=reason,
-                             scope_id=data["_id"])
+                             scope_id="{}_{}".format(data["pid"], check_timestamp))
             rows.append(sr)
     return ScopeRecordResp(rows=rows, total=total)
 
 
 # 故障诊断-波形分析
-async def scope_analyse_service(scope_id):
-    data = await detail_data_by_es(scope_id)
+async def scope_analyse_service(pid, create_time):
+    check_dt = time_format.get_datetime_str(create_time)
+    data = await detail_data_by_es(pid, check_dt)
     if not data:
-        log.info(f"波形分析 没有数据 scope_id:{scope_id}")
+        log.info(f"波形分析 没有数据 pid:{pid},create_time:{create_time}")
         return success_res(code=RET.not_data, msg="没有找到该数据")
-    point_name = await get_point_name_dao(data["point_id"])
-    dt = time_format.convert_to_dt(data["datetime"])
-    check_dt = time_format.convert_dt_to_timestr(dt)
-    context = json.loads(data.get("context"))
-    log.info(f"波形分析 scope_id:{scope_id}, type:{data['fault_type']}")
-    res = data.get("result")
-    if res:
+    point_name = await get_point_name_dao(pid)
+    try:
+        async with QsClient() as qs:
+            context = await qs.get_object(data["url"])
+    except Exception as e:
+        log.error(f"录波地址无效 url:{data['url']} message:{str(e)}")
+        return success_res(code=RET.not_data, msg="录波地址有误")
+    log.info(f"波形分析 pid:{pid},create_time:{create_time}, type:{data['fault_type']}")
+    res = data.get("index_loc")
+    try:
         res_dic = json.loads(res)
         trigger_point = [d for d in res_dic.values()][0].get("location")
-    else:
-        trigger_point = None
+    except AttributeError as e:
+        log.error(f"录波出发位置有误 index_loc:{data['index_loc']} message:{str(e)}")
+        trigger_point = 0
     # 漏电流
-    if data["fault_type"] == "over_res_cur":
+    if data["fault_type"] in ("over_res_cur", "overResidualCurrent"):
         threhold = await get_threhold(data["cid"], data["sid"])
         # result = leakage_reg(ileak_rms=context["ileak_rms"],
         #                      leak_hold=threhold)
         # log.info(f"actionFile 漏电流 结论:{result}")
         trigger_params = ["漏电流越限"]
         reason = [{"title": "漏电流越限", "probability": 1,
                    "suggest": "逐级排查找出漏电故障点修复,需做好防护措施防止触电"}]
@@ -113,7 +121,7 @@ async def scope_analyse_service(scope_id):
                                  point_name=point_name["name"],
                                  trigger_time=check_dt,
                                  trigger_point=trigger_point,
-                                 trigger_params=trigger_params,
+                                 trigger_params="-".join(trigger_params),
                                  reason=reason, contents=context)
     else:
@@ -139,15 +147,16 @@ async def scope_analyse_service(scope_id):
             # trigger_params[0] = trigger_params[0] + "相"
         fault_type = await get_trigger_params_dao(data["fault_type"])
         trigger_params = [fault_type.get("name")]
-        event = await event_data_by_es(scope_id)
+        event = await event_data_by_es(data["mtid"], check_dt,
+                                       data["fault_type"])
         if event:
             trigger_params.insert(0, f'{event.get("phase")}相')
-        logging.info(f"scope_id:{scope_id}, fault_type:{data['fault_type']}, "
-                     f"result:{result}")
-        return ScopeAnalyseResp(point_id=data["point_id"],
+        logging.info(f"pid:{pid},create_time:{create_time}, "
+                     f"fault_type:{data['fault_type']}, result:{result}")
+        return ScopeAnalyseResp(point_id=pid,
                                 point_name=point_name["name"],
                                 trigger_time=check_dt,
-                                trigger_params=["漏电流越限"],
+                                trigger_params="-".join(trigger_params),
                                 trigger_point=trigger_point,
                                 reason=fina_reason,
                                 contents=context)
@@ -178,10 +187,10 @@ REASON_DICT = {
     "单相断线": "请尽快找出断线故障点修复,避免三相负载长时间缺相运行",
     "单相接地": "逐级排查找出接地故障点修复,带电排查需疏散无关人员并做好防护措施防止触电",
     "两相短路": "请立即将短路故障点上级开关断开挂牌,再进行短路故障原因排查修复,"
-            "防止有人重复送电造成触电及二次短路事故",
+            "防止有人重复送电造成触电及二次短路事故",
     "两相接地": "逐级排查找出接地故障点修复,带电排查需疏散无关人员并做好防护措施防止触电",
     "三相短路": "请立即将短路故障点上级开关断开挂牌,再进行短路故障原因排查修复,"
-            "防止有人重复送电造成触电及二次短路事故",
+            "防止有人重复送电造成触电及二次短路事故",
     "三相断线": "请尽快找出断线故障点修复,排查需疏散无关人员并做好防护措施防止触电",
     "两相断线": "请尽快找出断线故障点修复,避免三相负载长时间缺相运行",
     "漏电越限": "逐级排查找出漏电故障点修复,需做好防护措施防止触电"
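To summarise the new data path in scope_analyse_service: the waveform payload is fetched from object storage through QsClient using the url column, and the trigger position is parsed out of the index_loc JSON. The sketch below is not from the commit and simplifies the service's error handling slightly:

    # Sketch only, not from the commit: fetch the waveform payload from object
    # storage and pull the trigger location out of index_loc, roughly as the
    # rewritten scope_analyse_service does.
    import json

    from pot_libs.qingstor_util.qs_client import QsClient


    async def load_context_and_trigger_point(scope_row):
        # The payload now sits behind scope_row["url"] instead of a "context" column.
        async with QsClient() as qs:
            context = await qs.get_object(scope_row["url"])

        trigger_point = None
        index_loc = scope_row.get("index_loc")
        if index_loc:
            try:
                locations = json.loads(index_loc)
                trigger_point = list(locations.values())[0].get("location")
            except (ValueError, AttributeError, IndexError):
                trigger_point = 0  # the service falls back to 0 on a bad index_loc
        return context, trigger_point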
unify_api/modules/scope_analyse/views/scope_analyse.py

 from pot_libs.sanic_api import summary
 from unify_api.modules.scope_analyse.components.scope_analyse_cps import \
-    ScopeRecordReq, ScopeRecordResp, ScopeAnalyseReq, ScopeAnalyseResp, \
+    ScopeRecordReq, ScopeRecordResp, ScopeAnalyseReq, ScopeAnalyseResp, \
     LeakageCurrentResp
 from unify_api.modules.scope_analyse.service.scope_analyse_service import *

@@ -14,15 +14,16 @@ async def post_scope_record(req, body: ScopeRecordReq) -> ScopeRecordResp:
     end = body.end
     page_size = body.page_size
     page_num = body.page_num
-    return await scope_record_service(cid, point_id, page_num, page_size,
-                                      start, end)
+    return await scope_record_service(cid, point_id, page_num, page_size,
+                                      start, end)
 
 
 # 故障诊断-波形分析
 @summary('故障诊断-波形分析')
 async def post_scope_analyse(req, body: ScopeAnalyseReq) -> ScopeAnalyseResp:
     scope_id = body.scope_id
-    return await scope_analyse_service(scope_id)
+    pid, create_time = tuple(scope_id.split("_"))
+    return await scope_analyse_service(pid, int(create_time))
 
 
 # 故障诊断-波形分析-漏电流
 # @summary('故障诊断-波形分析-漏电流')
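The view now treats scope_id as a composite key rather than an Elasticsearch document id. A small round-trip sketch with invented sample values, matching how scope_record_service builds the id and post_scope_analyse splits it:

    # Composition in scope_record_service (sample values are invented):
    pid, check_timestamp = 1024, 1688349600
    scope_id = "{}_{}".format(pid, check_timestamp)   # -> "1024_1688349600"

    # Parsing in post_scope_analyse:
    pid_part, create_time_part = tuple(scope_id.split("_"))
    assert pid_part == "1024" and int(create_time_part) == check_timestamp
    # scope_analyse_service(pid_part, int(create_time_part)) is then awaited.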