Commit 9b02f7be in chaonan/unify_api2
Authored Aug 22, 2022 by lcn
Commit message: "修复bug" (Fix bug)
Parent commit: 81c24bd9
Showing 3 changed files with 90 additions and 54 deletions (+90, -54):

unify_api/modules/common/procedures/location_temp_rcurrent.py  (+2, -0)
unify_api/modules/elec_charge/dao/syncretize_energy_es_dao.py  (+28, -0)
unify_api/modules/elec_charge/service/syncretize_energy_services.py  (+60, -54)
unify_api/modules/common/procedures/location_temp_rcurrent.py
@@ -156,6 +156,8 @@ async def location_stats_statics_new15(table_name, cid, start, end):
     location_map = {}
     async with MysqlUtil() as conn:
         locations = await conn.fetchall(sql, args=(cid,))
+    if not locations:
+        return location_map
     for loca in locations:
         location_map[loca["lid"]] = loca
     datas_sql = f"SELECT * from {table_name} where lid in %s and create_time" \
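The two added lines return early when the company has no locations, so the caller gets an empty location_map instead of the function going on to build and run the follow-up datas_sql query with an empty lid list. A minimal sketch of the guard, with a plain coroutine standing in for the MysqlUtil/conn.fetchall call (that stand-in is an assumption for illustration only):

import asyncio
from typing import Any, Dict, List


async def fake_fetchall() -> List[Dict[str, Any]]:
    # Stand-in for `await conn.fetchall(sql, args=(cid,))`; returns no rows here.
    return []


async def build_location_map(fetchall) -> Dict[int, Dict[str, Any]]:
    location_map: Dict[int, Dict[str, Any]] = {}
    locations = await fetchall()
    if not locations:        # guard added in this commit
        return location_map  # short-circuit before any follow-up query
    for loca in locations:
        location_map[loca["lid"]] = loca
    return location_map


print(asyncio.run(build_location_map(fake_fetchall)))  # {}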
unify_api/modules/elec_charge/dao/syncretize_energy_es_dao.py
 from pot_libs.es_util.es_utils import EsUtil
+from pot_libs.mysql_util.mysql_util import MysqlUtil
 from pot_libs.utils.pendulum_wrapper import my_pendulum
+from unify_api.utils.time_format import get_start_end_by_tz_time_new


 def convert_es_str(str1: object) -> object:
@@ -64,6 +66,20 @@ async def query_search_kwh_p(cid, start, end, interval):
     return es_re["aggregations"]["quarter_time"]["buckets"]


+async def query_search_kwh_p_new15(cid, start, end):
+    start, _ = get_start_end_by_tz_time_new(start)
+    _, end = get_start_end_by_tz_time_new(end)
+    sql = f"""
+        select p,kwh,create_time from company_15min_power
+        where cid = %s and create_time BETWEEN %s and %s
+        order by create_time asc
+    """
+    async with MysqlUtil() as conn:
+        datas = await conn.fetchall(sql=sql, args=(cid, start, end))
+    return datas
+
+
 async def query_spfv_price(cid, start, end):
     query_body = {
         "query": {
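query_search_kwh_p_new15 replaces the Elasticsearch aggregation with a direct read of the company_15min_power MySQL table, so callers now receive plain rows instead of ES buckets. The shapes below are inferred from the field accesses visible in this diff (bucket keys and nested "avg" on the old side, dict rows with a datetime create_time on the new side), so treat them as an illustrative assumption:

from datetime import datetime

# Old shape: one Elasticsearch bucket per interval, values nested under "avg"
es_bucket = {
    "key_as_string": "2022-08-01T00:15:00",
    "p": {"avg": 1.23},
    "kwh": {"avg": 0.31},
}

# New shape: one MySQL row per 15-minute record from company_15min_power;
# create_time comes back as a datetime, which is why the service layer now
# formats it with .strftime("%Y-%m-%d %H:%M:%S") before slicing out HH:MM.
mysql_row = {
    "create_time": datetime(2022, 8, 1, 0, 15, 0),
    "p": 1.23,
    "kwh": 0.31,
}

print(es_bucket["key_as_string"][11:16],
      mysql_row["create_time"].strftime("%Y-%m-%d %H:%M:%S")[11:16])  # 00:15 00:15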
@@ -103,3 +119,15 @@ async def query_spfv_price(cid, start, end):
         es_re = await es.search_origin(body=query_body,
                                        index="poweriot_company_15min_power")
     return es_re["aggregations"]["charge"]["avg"], es_re["aggregations"]["kwh"]["avg"]
+
+
+async def query_spfv_price_new15(cid, start, end):
+    start, _ = get_start_end_by_tz_time_new(start)
+    _, end = get_start_end_by_tz_time_new(end)
+    sql = f"""
+        select avg(charge) charge,avg(kwh) kwh from company_15min_power
+        where cid = %s and create_time BETWEEN %s and %s
+    """
+    async with MysqlUtil() as conn:
+        data = await conn.fetchone(sql=sql, args=(cid, start, end))
+    return data.get('charge', 0), data.get('kwh', 0)
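The new query_spfv_price_new15 returns the window averages of charge and kwh (falling back to 0 via data.get when the aggregate row has no value), and the service layer later divides the two to get an average unit price. A small worked example of that arithmetic, with made-up numbers:

# avg(charge) and avg(kwh) over the selected window, as returned by the new DAO
charge_price, kwh_price = 1842.5, 2310.0

# Same zero/None guard the service applies before dividing
spfv_price = charge_price / kwh_price if charge_price and kwh_price else 0
print(round(spfv_price, 4))  # 0.7976 -> average charge per kWh over the period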
unify_api/modules/elec_charge/service/syncretize_energy_services.py
@@ -7,7 +7,7 @@ import pandas as pd
 from pot_libs.logger import log
 from pot_libs.common.components.responses import success_res
 from unify_api.modules.elec_charge.dao.syncretize_energy_es_dao import \
-    query_search_kwh_p, query_spfv_price
+    query_search_kwh_p_new15, query_spfv_price_new15
 from unify_api.modules.elec_charge.dao.syncretize_energy_dao import \
     get_annual_sunshine_hours, get_p, insert_price_policy_data_dao, \
     inset_algorithm_power_factor_dao, get_elec_price_dao, \
@@ -43,10 +43,10 @@ async def pv_evaluate_service(cid, start, end):
     elif (pendulum_end - pendulum_start).in_months() > 12:
         return success_res(code=4008, msg="日期最多选择12个月")
     else:
-        # kwh_datas = await query_search_kwh_p(cid, f"{start}-01",
+        # kwh_datas = await query_search_kwh_p_new15(cid, f"{start}-01",
         #                                f"{end}-{day_num}", "1h")
-        p_datas = await query_search_kwh_p(cid, f"{start}-01",
-                                           f"{end}-{day_num}", "15m")
+        p_datas = await query_search_kwh_p_new15(cid, f"{start}-01",
+                                                 f"{end}-{day_num}")
     if not p_datas:
         return PvEvaluateTwoResp(kwh_slot=[], p_slot=[], p=[], kwh=[],
                                  electrovalence={}, sum_kwh_p="",
@@ -56,18 +56,19 @@ async def pv_evaluate_service(cid, start, end):
     kwh_slots = {"%02d" % i: [] for i in range(24)}
     num, flag = 0, "00"
     for data in p_datas:
-        if data["p"]["avg"]:
-            p_slots[data["key_as_string"][11:16]].append(data["p"]["avg"])
-        if data["kwh"]["avg"]:
-            if flag == data["key_as_string"][11:13]:
-                num += data["kwh"]["avg"]
+        create_time = data["create_time"].strftime("%Y-%m-%d %H:%M:%S")
+        if data["p"]:
+            p_slots[create_time[11:16]].append(data["p"])
+        if data["kwh"]:
+            if flag == create_time[11:13]:
+                num += data["kwh"]
             else:
                 kwh_slots[flag].append(num)
-                num = data["kwh"]["avg"]
-                flag = data["key_as_string"][11:13]
+                num = data["kwh"]
+                flag = create_time[11:13]
     # for data in p_datas:
-    #     if data["p"]["avg"]:
-    #         p_slots[data["key_as_string"][11:16]].append(data["p"]["avg"])
+    #     if data["p"]:
+    #         p_slots[create_time[11:16]].append(data["p"])
     for key, value in kwh_slots.items():
         kwh_slots[key] = round(sum(value) / len(value), 2) if value else ""
     # 获取峰时段
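The reworked loop keys both dictionaries off the formatted MySQL create_time instead of the ES key_as_string, and reads p/kwh directly instead of ["avg"]. Below is a condensed, standalone version of that bucketing logic with fabricated rows; the p_slots/kwh_slots key formats are inferred from the slices in this diff, and a final flush is added only so the last accumulated hour shows up in this example's output:

from datetime import datetime, timedelta

# Fabricated rows in the shape assumed to come back from query_search_kwh_p_new15
rows = [{"create_time": datetime(2022, 8, 1, 0, 0) + timedelta(minutes=15 * i),
         "p": 10.0 + i, "kwh": 2.5} for i in range(8)]  # two hours of 15-min records

p_slots = {"%02d:%02d" % (h, m): [] for h in range(24) for m in (0, 15, 30, 45)}
kwh_slots = {"%02d" % h: [] for h in range(24)}
num, flag = 0, "00"

for data in rows:
    create_time = data["create_time"].strftime("%Y-%m-%d %H:%M:%S")
    if data["p"]:
        p_slots[create_time[11:16]].append(data["p"])  # quarter-hour key "HH:MM"
    if data["kwh"]:
        if flag == create_time[11:13]:                  # still in the same hour
            num += data["kwh"]
        else:                                           # hour rolled over: flush the sum
            kwh_slots[flag].append(num)
            num = data["kwh"]
            flag = create_time[11:13]

kwh_slots[flag].append(num)  # extra flush so the last hour appears in this example
print({k: v for k, v in kwh_slots.items() if v})  # {'00': [10.0], '01': [10.0]}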
@@ -135,14 +136,15 @@ async def pv_evaluate_compute_service(download=None, url=None, **body):
         {"quarter_time": list(p_slots.keys()), "pv_curve": df_pv_curve},
         columns=["quarter_time", "pv_curve"])
     # 获取电量和负荷15分钟数据
-    datas = await query_search_kwh_p(body.get("cid"),
-                                     f"{body.get('start')}-01",
-                                     f"{body.get('end')}-{day_num}", "15m")
+    datas = await query_search_kwh_p_new15(body.get("cid"),
+                                           f"{body.get('start')}-01",
+                                           f"{body.get('end')}-{day_num}")
     if not datas:
         return success_res(code=4008, msg="未找到数据")
     for data in datas:
-        if data["p"]["avg"]:
-            p_slots[data["key_as_string"][11:19]].append(data["p"]["avg"])
+        create_time = data["create_time"].strftime("%Y-%m-%d %H:%M:%S")
+        if data["p"]:
+            p_slots[create_time[11:19]].append(data["p"])
     for index, value in p_slots.items():
         p_slots[index] = sum(value) / len(value) if value else None
     # 负荷曲线df_load
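After the loop, each slot's list is collapsed to its mean in place, with empty slots becoming None (the kwh path seen earlier uses round(..., 2) and "" instead). A tiny standalone illustration of that step:

p_slots = {"00:00:00": [10.0, 12.0], "00:15:00": []}
for index, value in p_slots.items():
    p_slots[index] = sum(value) / len(value) if value else None
print(p_slots)  # {'00:00:00': 11.0, '00:15:00': None}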
@@ -151,7 +153,7 @@ async def pv_evaluate_compute_service(download=None, url=None, **body):
         "load_curve": list(p_slots.values())},
         columns=["quarter_time", "load_curve"])
     # 获取这段时间平均价格
-    charge_price, kwh_price = await query_spfv_price(body.get("cid"),
-                                                     f"{body.get('start')}-01",
-                                                     f"{body.get('end')}-{day_num}")
+    charge_price, kwh_price = await query_spfv_price_new15(body.get("cid"),
+                                                           f"{body.get('start')}-01",
+                                                           f"{body.get('end')}-{day_num}")
     spfv_price = charge_price / kwh_price if charge_price and kwh_price else 0
@@ -251,8 +253,8 @@ async def ess_evaluate_service(cid, start, end, work_day):
     elec_list = [i for i in re.findall("p*", elec["quarters"]) if i]
     rule = 2 if len(elec_list) > 1 else 1
-    p_datas = await query_search_kwh_p(cid, f"{start}-01",
-                                       f"{end}-{day_num}", "15m")
+    p_datas = await query_search_kwh_p_new15(cid, f"{start}-01",
+                                             f"{end}-{day_num}")
     if not p_datas:
         return PvEvaluateTwoResp(kwh_slot=[], p_slot=[], p=[], kwh=[],
                                  electrovalence={}, sum_kwh_p="",
@@ -264,40 +266,43 @@ async def ess_evaluate_service(cid, start, end, work_day):
     # 1全部 2工作日 3非工作日
     if work_day == 2:
         for data in p_datas:
-            if ChineseCalendar(data["key_as_string"][:10]).is_workday():
-                if data["p"]["avg"]:
-                    p_slots[data["key_as_string"][11:16]].append(data["p"]["avg"])
-                if data["kwh"]["avg"]:
-                    if flag == data["key_as_string"][11:13]:
-                        num += data["kwh"]["avg"]
+            create_time = data["create_time"].strftime("%Y-%m-%d %H:%M:%S")
+            if ChineseCalendar(create_time[:10]).is_workday():
+                if data["p"]:
+                    p_slots[create_time[11:16]].append(data["p"])
+                if data["kwh"]:
+                    if flag == create_time[11:13]:
+                        num += data["kwh"]
                     else:
                         kwh_slots[flag].append(num)
-                        num = data["kwh"]["avg"]
-                        flag = data["key_as_string"][11:13]
+                        num = data["kwh"]
+                        flag = data["create_time"][11:13]
     elif work_day == 3:
         for data in p_datas:
-            if not ChineseCalendar(data["key_as_string"][:10]).is_workday():
-                if data["kwh"]["avg"]:
-                    if flag == data["key_as_string"][11:13]:
-                        num += data["kwh"]["avg"]
+            create_time = data["create_time"].strftime("%Y-%m-%d %H:%M:%S")
+            if not ChineseCalendar(create_time[:10]).is_workday():
+                if data["kwh"]:
+                    if flag == create_time[11:13]:
+                        num += data["kwh"]
                     else:
                         kwh_slots[flag].append(num)
-                        num = data["kwh"]["avg"]
-                        flag = data["key_as_string"][11:13]
-                if data["p"]["avg"]:
-                    p_slots[data["key_as_string"][11:16]].append(
-                        data["p"]["avg"])
+                        num = data["kwh"]
+                        flag = create_time[11:13]
+                if data["p"]:
+                    p_slots[create_time[11:16]].append(
+                        data["p"])
     else:
         for data in p_datas:
-            if data["p"]["avg"]:
-                p_slots[data["key_as_string"][11:16]].append(data["p"]["avg"])
-            if data["kwh"]["avg"]:
-                if flag == data["key_as_string"][11:13]:
-                    num += data["kwh"]["avg"]
+            create_time = data["create_time"].strftime("%Y-%m-%d %H:%M:%S")
+            if data["p"]:
+                p_slots[create_time[11:16]].append(data["p"])
+            if data["kwh"]:
+                if flag == create_time[11:13]:
+                    num += data["kwh"]
                 else:
                     kwh_slots[flag].append(num)
-                    num = data["kwh"]["avg"]
-                    flag = data["key_as_string"][11:13]
+                    num = data["kwh"]
+                    flag = create_time[11:13]
     for key, value in kwh_slots.items():
         kwh_slots[key] = round(sum(value) / len(value), 2) if value else ""
     # 获取峰时段
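In ess_evaluate_service the same accumulation now runs under a workday filter: the date prefix of the formatted create_time (create_time[:10]) is passed to ChineseCalendar(...).is_workday(), keeping workdays when work_day == 2 and non-workdays when work_day == 3. A small sketch of that selection, with a plain weekday check standing in for ChineseCalendar (the real class presumably also accounts for Chinese public holidays):

from datetime import datetime

def is_workday(date_str: str) -> bool:
    # Simplified stand-in for ChineseCalendar(date_str).is_workday()
    return datetime.strptime(date_str, "%Y-%m-%d").weekday() < 5

row = {"create_time": datetime(2022, 8, 22, 9, 45), "p": 12.3, "kwh": 3.1}  # a Monday
create_time = row["create_time"].strftime("%Y-%m-%d %H:%M:%S")

work_day = 2  # 1 = all days, 2 = workdays only, 3 = non-workdays only
if work_day == 2:
    keep = is_workday(create_time[:10])
elif work_day == 3:
    keep = not is_workday(create_time[:10])
else:
    keep = True
print(create_time[:10], "kept" if keep else "skipped")  # 2022-08-22 kept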
@@ -372,9 +377,9 @@ async def ess_evaluate_compute_service(download=None, url=None, **body):
             max_demand_pmax += demand[1]
     max_demand_var = {"flag": max_demand_flag, "pmax": max_demand_pmax}
     # 获取电量和负荷15分钟数据
-    datas = await query_search_kwh_p(body.get("cid"),
-                                     f"{body.get('start')}-01",
-                                     f"{body.get('end')}-{day_num}", "15m")
+    datas = await query_search_kwh_p_new15(body.get("cid"),
+                                           f"{body.get('start')}-01",
+                                           f"{body.get('end')}-{day_num}")
     if not datas:
         return success_res(code=4008, msg="未找到数据")
     p_slots = {"%02d:%02d:00" % (i, j): [] for i in range(24) for j in
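Because ess_evaluate_compute_service builds its p_slots keys as "%02d:%02d:00" % (i, j), the formatted create_time is sliced with [11:19] (HH:MM:SS) in the next hunk, whereas the service-level functions key on [11:16] (HH:MM). A quick look at what each slice of the "%Y-%m-%d %H:%M:%S" string yields:

from datetime import datetime

create_time = datetime(2022, 8, 1, 13, 45).strftime("%Y-%m-%d %H:%M:%S")
print(create_time[:10])    # '2022-08-01' -> date prefix used for the workday check
print(create_time[11:13])  # '13'         -> hour bucket for the kwh accumulation
print(create_time[11:16])  # '13:45'      -> quarter-hour key in the *_service functions
print(create_time[11:19])  # '13:45:00'   -> quarter-hour key in the *_compute_service functions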
@@ -382,17 +387,18 @@ async def ess_evaluate_compute_service(download=None, url=None, **body):
     for data in datas:
         # 1全部 2工作日 3非工作日
         if body.get("work_day") == "2":
-            if ChineseCalendar(data["key_as_string"][:10]).is_workday() and data["p"]["avg"]:
-                p_slots[data["key_as_string"][11:19]].append(
-                    data["p"]["avg"])
+            create_time = data["create_time"].strftime("%Y-%m-%d %H:%M:%S")
+            if ChineseCalendar(create_time[:10]).is_workday() and data["p"]:
+                p_slots[create_time[11:19]].append(data["p"])
         elif body.get("work_day") == "3":
-            if not ChineseCalendar(
-                    data["key_as_string"][:10]).is_workday() and data["p"]["avg"]:
-                p_slots[data["key_as_string"][11:19]].append(
-                    data["p"]["avg"])
+            if not ChineseCalendar(
+                    create_time[:10]).is_workday() and data["p"]:
+                p_slots[create_time[11:19]].append(
+                    data["p"])
         else:
-            if data["p"]["avg"]:
-                p_slots[data["key_as_string"][11:19]].append(data["p"]["avg"])
+            if data["p"]:
+                p_slots[create_time[11:19]].append(data["p"])
     for index, value in p_slots.items():
         p_slots[index] = sum(value) / len(value) if value else None
     # 负荷典型用电曲线 df_curve