chaonan / unify_api2 · Commits

Commit 1cbb81b3, authored Jun 15, 2023 by ZZH
Parent: 8fae28aa

fix real power2023-6-15

Showing 2 changed files with 27 additions and 24 deletions (+27 / -24):

  unify_api/modules/electric/dao/electric_dao.py             +12 / -0
  unify_api/modules/home_page/procedures/count_info_pds.py   +15 / -24
unify_api/modules/electric/dao/electric_dao.py  (+12 / -0)

 from pot_libs.mysql_util.mysql_util import MysqlUtil
+from pot_libs.settings import SETTING
 ...
 async def monitor_point_join_by_points(points):
 ...
@@ -71,3 +72,14 @@ async def get_elec_mtid_sid_by_cid(cid):
     async with MysqlUtil() as conn:
         datas = await conn.fetchall(sql, args=(cid_tuple,))
     return datas if datas else []
+
+
+async def load_add_to_compy_ids(cid):
+    db = SETTING.mysql_db
+    sql = f"SELECT monitor.mtid, monitor.sid FROM {db}.monitor " \
+          f"INNER JOIN {db}.point ON point.mtid=monitor.mtid " \
+          f"WHERE monitor.cid=%s AND point.add_to_company=1 " \
+          f"AND monitor.demolished=0;"
+    async with MysqlUtil() as conn:
+        ids = await conn.fetchall(sql, (cid,))
+    return ids if ids else []
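The new DAO helper returns one row per non-demolished monitor under the given company whose point has add_to_company=1, each row carrying the meter id (mtid, used for the 2.0-architecture mt tables) and the device id (sid, used for the 1.0-architecture sid tables). A minimal usage sketch, assuming a configured pot_libs MySQL environment; the company id and the asyncio driver below are illustrative only:

import asyncio

from unify_api.modules.electric.dao.electric_dao import load_add_to_compy_ids


async def main():
    cid = 123  # hypothetical company id; any valid monitor.cid would do
    rows = await load_add_to_compy_ids(cid)
    for row in rows:
        # Each row exposes mtid (2.0 mt tables) and sid (1.0 sid tables).
        print(row["mtid"], row["sid"])


if __name__ == "__main__":
    asyncio.run(main())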
unify_api/modules/home_page/procedures/count_info_pds.py  (+15 / -24)

 ...
@@ -4,8 +4,7 @@ from datetime import datetime, timedelta
 import pendulum
 from pot_libs.settings import SETTING
-from unify_api.modules.electric.dao.electric_dao import \
-    get_elec_mtid_sid_by_cid
+from unify_api.modules.electric.dao.electric_dao import load_add_to_compy_ids
 from unify_api.utils.common_utils import round_2
 from pot_libs.aredis_util.aredis_utils import RedisUtils
 from pot_libs.logger import log
 ...
@@ -34,6 +33,7 @@ from unify_api.modules.electric_optimization.dao.power_index import (
 )
 from unify_api.utils.taos_new import parse_td_columns, get_td_table_name, \
     td3_tbl_compate, get_td_engine_data
+from unify_api.utils.time_format import CST


 async def other_info(cid):
 ...
@@ -205,34 +205,25 @@ async def normal_rate_of_location(cid):
 async def real_time_load(cid, end_dt=None):
     """Real-time load"""
-    datas = await get_elec_mtid_sid_by_cid(cid)
-    td_mt_tables = tuple(
-        (get_td_table_name("electric", data["mtid"])
-         for data in datas if
-         data["mtid"]))
-    td_mt_tables = td3_tbl_compate(td_mt_tables)
+    td_tbls = []
+    for item in await load_add_to_compy_ids(cid):
+        mtid, sid = item["mtid"], item["sid"]
+        tbl = get_td_table_name("electric", mtid)
+        td_tbls.append(tbl)
+        td_tbls.append(f"s_{sid.lower()}_e")
+    td_mt_tables = td3_tbl_compate(td_tbls)
     if not end_dt:
-        end_dt = pendulum.now(tz="Asia/Shanghai")
-    start_dt = end_dt.subtract(minutes=2)
-    sql = f"select last_row(mdptime, pttl) from electric_stb " \
-          f"where TBNAME IN {td_mt_tables} and ts>='{str(start_dt)}' and ts " \
-          f"<='{str(end_dt)}' group by tbname"
+        end_dt = pendulum.now(tz=CST)
+    s_dt = end_dt.subtract(minutes=15)
+    sql = f"SELECT last_row(mdptime, pttl) FROM electric_stb " \
+          f"WHERE TBNAME IN {td_mt_tables} " \
+          f"AND ts>='{str(s_dt)}' AND ts <='{str(end_dt)}' group by tbname;"
     url = f"{SETTING.stb_url}db_electric?tz=Asia/Shanghai"
     is_succ, results = await get_td_engine_data(url, sql)
     if not is_succ:
         return ""
-    if not results["data"]:
-        # Compatibility: no data in the mt tables (2.0 architecture), read from the sid tables (1.0 architecture) instead
-        td_s_tables = tuple((f"s{data['sid'].lower()}_e"
-                             for data in datas if data["sid"]))
-        td_s_tables = td3_tbl_compate(td_s_tables)
-        sql = f"select last_row(mdptime, pttl) from electric_stb " \
-              f"where TBNAME IN {td_s_tables} group by tbname"
-        is_succ, results = await get_td_engine_data(url, sql)
-        if not is_succ:
-            return ""
     head = parse_td_columns(results)
     datas = []
     for res in results["data"]:
 ...
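Net effect of the count_info_pds.py change: real_time_load now collects both the 2.0-architecture mt table names and the 1.0-architecture s_{sid}_e table names up front and issues a single TDengine last_row query over a 15-minute window, where the previous version queried only the mt tables over a 2-minute window and fell back to the sid tables when that returned no rows. A standalone sketch of the query assembly, assuming the same electric_stb super-table layout; the mt-table name format below is a hypothetical stand-in for get_td_table_name, and a plain tuple stands in for td3_tbl_compate:

import pendulum


def build_realtime_load_sql(rows, end_dt=None, window_minutes=15):
    # rows: [{"mtid": ..., "sid": ...}, ...] as returned by load_add_to_compy_ids
    tbls = []
    for item in rows:
        mtid, sid = item["mtid"], item["sid"]
        tbls.append(f"mt_{mtid}_electric")  # stand-in for get_td_table_name("electric", mtid)
        tbls.append(f"s_{sid.lower()}_e")   # 1.0-architecture table name, as in the diff
    tbl_tuple = tuple(tbls)                 # stand-in for td3_tbl_compate(tbls)
    end_dt = end_dt or pendulum.now(tz="Asia/Shanghai")
    s_dt = end_dt.subtract(minutes=window_minutes)
    return (f"SELECT last_row(mdptime, pttl) FROM electric_stb "
            f"WHERE TBNAME IN {tbl_tuple} "
            f"AND ts>='{s_dt}' AND ts<='{end_dt}' group by tbname;")


# Two illustrative monitor rows.
print(build_realtime_load_sql([{"mtid": 1, "sid": "A1B2"}, {"mtid": 2, "sid": "C3D4"}]))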