chaonan / unify_api2 · Commits

Commit e9a86df4
Authored Jul 04, 2023 by lcn

Bug fix (bug修复)

Parent: 43ef9b36
Showing 3 changed files with 54 additions and 49 deletions:

unify_api/modules/scope_analyse/dao/scope_record_dao.py            +12 -5
unify_api/modules/scope_analyse/service/scope_analyse_service.py   +40 -43
unify_api/modules/scope_analyse/views/scope_analyse.py             +2  -1
unify_api/modules/scope_analyse/dao/scope_record_dao.py
@@ -8,12 +8,19 @@ from unify_api.modules.zhiwei_u.components.scope_operations_cps import \
 from pot_libs.mysql_util.mysql_util import MysqlUtil
 
 
-async def scope_by_sql(mid_sql):
-    sql = f"select * from point_1min_scope where {mid_sql} order by " \
-          f"create_time desc limit 500"
+async def scope_by_sql(mid_sql, offset, limit):
+    scope_event_type = ['over_gap_cur', 'over_gap_i', 'over_gap_pttl',
+                        'over_gap_u', 'over_res_cur', 'over_rms_i',
+                        'over_rms_pttl', 'over_rms_u', 'under_rms_u']
+    sql = f"select * from point_1min_scope where {mid_sql} " \
+          f"and fault_type in %s" \
+          f"order by create_time desc limit {limit} offset {offset}"
+    count_sql = f"select count(*) count from point_1min_scope where" \
+                f" {mid_sql} and fault_type in %s"
     async with MysqlUtil() as conn:
-        data = await conn.fetchall(sql)
-    return data
+        data = await conn.fetchall(sql, args=(scope_event_type,))
+        count = await conn.fetch_value(count_sql, args=(scope_event_type,))
+    return data, count
 
 
 async def detail_data_by_es(pid, create_time):
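The reworked scope_by_sql pushes paging into MySQL ("limit {limit} offset {offset}") and adds a matching count query, instead of always returning the newest 500 rows as before. Both queries restrict fault_type to the nine scope event types by binding the whole Python list to a single %s placeholder; assuming MysqlUtil delegates to pymysql/aiomysql-style escaping (pot_libs is not shown in this diff), a list bound that way is rendered as a parenthesized value list, which is also why the committed strings can concatenate %s directly against "order by" and still parse. A minimal sketch of the same pattern using aiomysql directly, with placeholder connection settings:

import asyncio
import aiomysql

# Same event-type whitelist as the committed DAO code.
SCOPE_EVENT_TYPES = ['over_gap_cur', 'over_gap_i', 'over_gap_pttl',
                     'over_gap_u', 'over_res_cur', 'over_rms_i',
                     'over_rms_pttl', 'over_rms_u', 'under_rms_u']


async def fetch_scope_page(pool, mid_sql, offset, limit):
    # A list bound to one placeholder is escaped as a parenthesized value
    # list, so "fault_type in %s" expands to "fault_type in ('over_gap_cur', ...)".
    sql = (f"select * from point_1min_scope where {mid_sql} "
           f"and fault_type in %s "
           f"order by create_time desc limit {limit} offset {offset}")
    count_sql = (f"select count(*) cnt from point_1min_scope "
                 f"where {mid_sql} and fault_type in %s")
    async with pool.acquire() as conn:
        async with conn.cursor(aiomysql.DictCursor) as cur:
            await cur.execute(sql, (SCOPE_EVENT_TYPES,))
            rows = await cur.fetchall()
            await cur.execute(count_sql, (SCOPE_EVENT_TYPES,))
            total = (await cur.fetchone())["cnt"]
    return rows, total


async def main():
    # Placeholder credentials for illustration only; the real project goes
    # through pot_libs' MysqlUtil wrapper instead of a local pool.
    pool = await aiomysql.create_pool(host="127.0.0.1", port=3306,
                                      user="user", password="password",
                                      db="unify")
    rows, total = await fetch_scope_page(pool, "cid=123", offset=0, limit=20)
    print(total, len(rows))
    pool.close()
    await pool.wait_closed()


if __name__ == "__main__":
    asyncio.run(main())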
unify_api/modules/scope_analyse/service/scope_analyse_service.py
@@ -18,7 +18,7 @@ from pot_libs.common.components.responses import success_res
 from unify_api.utils.response_code import RET
 
 
-async def scope_record_service(cid, point_id, page_num, page_size, start, end):
+async def scope_record_service(cid, point_id, offset, limit, start, end):
     li = [f"cid={cid}"]
     if point_id:
         if len(point_id) == 1:
@@ -28,53 +28,50 @@ async def scope_record_service(cid, point_id, page_num, page_size, start, end):
     if start and end:
         li.append(f"create_time BETWEEN '{start}' and '{end}'")
     mid_sql = " and ".join(li)
-    datas = await scope_by_sql(mid_sql)
-    total = len(datas)
+    datas, total = await scope_by_sql(mid_sql, offset, limit)
     # 获取监测点名称
     point_dict = await get_point_dict(cid)
     # 动态漏电流阈值
     rows = []
-    start = (page_num - 1) * page_size
-    datas = datas[start:start + page_num]
-    for data in datas:
-        # 漏电流
-        if data["fault_type"] in ("over_res_cur", "overResidualCurrent"):
-            probability = 1
-            record_type_name = "漏电流"
-            reason = "漏电流越限"
-            threhold = await get_threhold(data["cid"], data["sid"])
-            log.info(f"scope_record_service threhold:{threhold}")
-        else:
-            try:
-                # 获取url地址数据,后续再接上
-                context = json.load(data.get("url"))
-                ctnum = 2 if "uab" in context else 3
-                result = actionFile(context, ctnum)
-            except:
-                result = None
-            log.info(f"actionFile:{result}")
-            if isinstance(result, list):
-                # record_type_name, probability, reason = result[0]
-                record_type_name, probability, _ = result[0]
-                probability = round(float(probability), 4)
-            else:
-                # record_type_name, probability, reason = "不存在故障", "",
-                # result
-                record_type_name, probability = "不存在故障", ""
-            fault_type = await get_trigger_params_dao(data["fault_type"])
-            reason = fault_type.get("name")
-        dt = data["create_time"]
-        check_dt = time_format.convert_dt_to_timestr(dt)
-        check_timestamp = int(time_format.convert_dt_to_timestamp(dt))
-        sr = ScopeRecord(trigger_time=check_dt, point_id=data["pid"],
-                         point_name=point_dict.get(data["pid"]),
-                         record_type=data["fault_type"],
-                         record_type_name=record_type_name,
-                         probability=probability,
-                         reason=reason,
-                         scope_id="{}_{}".format(data["pid"],
-                                                 check_timestamp))
-        rows.append(sr)
+    if datas:
+        for data in datas:
+            # 漏电流
+            if data["fault_type"] in ("over_res_cur", "overResidualCurrent"):
+                probability = 1
+                record_type_name = "漏电流"
+                reason = "漏电流越限"
+                threhold = await get_threhold(data["cid"], data["sid"])
+                log.info(f"scope_record_service threhold:{threhold}")
+            else:
+                try:
+                    # 获取url地址数据,后续再接上
+                    context = json.load(data.get("url"))
+                    ctnum = 2 if "uab" in context else 3
+                    result = actionFile(context, ctnum)
+                except:
+                    result = None
+                log.info(f"actionFile:{result}")
+                if isinstance(result, list):
+                    # record_type_name, probability, reason = result[0]
+                    record_type_name, probability, _ = result[0]
+                    probability = round(float(probability), 4)
+                else:
+                    # record_type_name, probability, reason = "不存在故障", "",
+                    # result
+                    record_type_name, probability = "不存在故障", ""
+                fault_type = await get_trigger_params_dao(data["fault_type"])
+                reason = fault_type.get("name")
+            dt = data["create_time"]
+            check_dt = time_format.convert_dt_to_timestr(dt)
+            check_timestamp = int(time_format.convert_dt_to_timestamp(dt))
+            sr = ScopeRecord(trigger_time=check_dt, point_id=data["pid"],
+                             point_name=point_dict.get(data["pid"]),
+                             record_type=data["fault_type"],
+                             record_type_name=record_type_name,
+                             probability=probability,
+                             reason=reason,
+                             scope_id="{}_{}".format(data["pid"],
+                                                     check_timestamp))
+            rows.append(sr)
     return ScopeRecordResp(rows=rows, total=total)
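The service now takes both the page of rows and the total straight from the DAO. Before this commit it fetched at most 500 rows, reported total = len(datas) (so the total could never exceed 500), and paginated in memory with datas[start:start + page_num], a window whose length is page_num rather than page_size, so any page where the two values differed came back with the wrong number of records. The new version receives offset/limit, lets MySQL do the paging, and only loops over the returned rows to build ScopeRecord entries (漏电流 in the labels means residual/leakage current). A small illustration of the two computations, with made-up numbers:

# Illustration (made-up values) of the paging behaviour before and after
# this commit: the old service sliced an in-memory list and used page_num
# as the window length; the new path hands offset/limit to MySQL.
page_num, page_size = 3, 20                  # page 3, 20 records per page

# Before: slice of the pre-fetched list (note the page_num-sized window).
start = (page_num - 1) * page_size           # 40
old_window = (start, start + page_num)       # (40, 43) -> only 3 rows returned

# After: values forwarded to "limit {limit} offset {offset}" in the SQL.
offset, limit = (page_num - 1) * page_size, page_size   # (40, 20)

print(old_window, (offset, limit))           # (40, 43) (40, 20)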
unify_api/modules/scope_analyse/views/scope_analyse.py
@@ -14,7 +14,8 @@ async def post_scope_record(req, body: ScopeRecordReq) -> ScopeRecordResp:
     end = body.end
     page_size = body.page_size
     page_num = body.page_num
-    return await scope_record_service(cid, point_id, page_num, page_size,
+    return await scope_record_service(cid, point_id,
+                                      (page_num - 1) * page_size, page_size,
                                       start, end)
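The view keeps page_num/page_size in the request body and converts them at the call site, so the service and DAO now deal only in offset/limit. A hypothetical helper (not part of the repo) showing the same 1-based conversion:

# Hypothetical helper mirroring what post_scope_record now does inline:
# turn the 1-based page_num/page_size from the request body into the
# offset/limit that scope_record_service expects. Illustration only.
def page_to_offset_limit(page_num, page_size):
    # Page 1 starts at offset 0, page 2 at offset page_size, and so on.
    return (page_num - 1) * page_size, page_size


assert page_to_offset_limit(1, 20) == (0, 20)
assert page_to_offset_limit(3, 20) == (40, 20)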