chaonan / unify_api2 · Commits · 984a952d

Commit 984a952d authored Jun 02, 2023 by ZZH
remove es 2023-6-2

parent 0bc47465
Showing 6 changed files with 65 additions and 316 deletions (+65 -316)

unify_api/modules/common/procedures/pttl_max.py                        +3   -149
unify_api/modules/elec_charge/dao/syncretize_energy_es_dao.py          +3   -109
unify_api/modules/elec_charge/service/syncretize_energy_services.py    +50  -43
unify_api/modules/elec_charge/views/elec_statistics.py                 +3   -4
unify_api/modules/home_page/procedures/count_info_pds.py               +2   -4
unify_api/modules/load_analysis/views/load_distribution.py             +4   -7
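Taken together, the changes drop the Elasticsearch aggregation path and route the same reads through MySQL: pttl_max_new15 / pttl_max_15min_new15 give way to load_pttl_max / load_pttl_max_15min, and query_search_kwh_p_new15 / query_spfv_price_new15 give way to load_compy_power / load_spfv_price. A minimal sketch of the access-pattern change, assuming the pot_libs helpers behave as they are used in the hunks below (illustrative only, not the repository's exact code):

# Illustrative sketch, not repo code: the shape of the migration in this commit.
from pot_libs.es_util.es_utils import EsUtil
from pot_libs.mysql_util.mysql_util import MysqlUtil


async def removed_path(query_body, index):
    # Before: aggregate directly in Elasticsearch.
    async with EsUtil() as es:
        return await es.search_origin(body=query_body, index=index)


async def added_path(sql, args):
    # After: fetch the rows from MySQL and aggregate in Python.
    async with MysqlUtil() as conn:
        return await conn.fetchall(sql=sql, args=args)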
unify_api/modules/common/procedures/pttl_max.py  View file @ 984a952d
from pot_libs.es_util.es_utils import EsUtil
from pot_libs.logger import log
from pot_libs.mysql_util.mysql_util import MysqlUtil
from unify_api.constants import POINT_15MIN_INDEX, INDEX
from unify_api.utils.es_query_body import EsQueryBody
from unify_api.utils.time_format import power_slots, range_to_type
from unify_api.utils.time_format import range_to_type
async def pttl_max(cid, start, end, point_id=None, inline_id=None):
    # Find the points on the given incoming line (inline)
    if inline_id:
        sql = "SELECT pid from `point` WHERE cid = %s " \
              "and inlid_belongedto = %s and add_to_company = 1"
        async with MysqlUtil() as conn:
            point_info = await conn.fetchall(sql=sql, args=(cid, inline_id))
        point_list = [point.get("pid") for point in point_info]
        terms = {"pid": point_list}
    elif point_id == -1:
        # All points selected
        # 1. Find every pid of the factory (point.add_to_company = 1)
        sql = "SELECT pid from `point` WHERE cid = %s " \
              "and add_to_company = 1"
        async with MysqlUtil() as conn:
            point_info = await conn.fetchall(sql=sql, args=(cid,))
        point_list = [point.get("pid") for point in point_info]
        terms = {"pid": point_list}
    else:
        terms = {"pid": [point_id]}
    # 1. Pick the index according to the time range
    date_type = range_to_type(start, end)
    index = INDEX[date_type]
    if date_type == "day":
        date_key = "hour"
    elif date_type == "month":
        date_key = "day"
    else:
        date_key = "month"
    # 2. Build the query body
    eqb = EsQueryBody(terms=terms, start=start, end=end, date_key=date_key)
    query = eqb.query()
    query["aggs"] = {
        "time_column": {
            "date_histogram": {
                "field": date_key,
                "interval": date_key,
                "time_zone": "+08:00",
                "format": "yyyy-MM-dd HH:mm"
            },
            "aggs": {
                "pttl_max": {
                    "sum": {
                        "field": "pttl_max"
                    }
                }
            }
        }
    }
    log.info(index + f"====={query}")
    async with EsUtil() as es:
        es_re = await es.search_origin(body=query, index=index)
    if not es_re["aggregations"]["time_column"]["buckets"]:
        return "", ""
    # 2. Return
    es_re = es_re["aggregations"]["time_column"]["buckets"]
    # Maximum demand
    max_val = 0
    max_val_time = ""
    for res in es_re:
        mdp_max_value = res["pttl_max"]["value"]
        if mdp_max_value and mdp_max_value > max_val:
            max_val = mdp_max_value
            max_val_time = res["key_as_string"]
    # Return a different time format depending on the time range
    if max_val_time:
        if date_type == "day":
            max_val_time = max_val_time.split(" ")[1]
        elif date_type == "month":
            max_val_time = max_val_time.split("-", 1)[1].split(" ")[0]
        else:
            max_val_time = max_val_time[:7]
    return max_val, max_val_time

-async def pttl_max_new15(cid, start, end, point_id=None, inline_id=None):
+async def load_pttl_max(cid, start, end, point_id=None, inline_id=None):
    # Find the points on the given incoming line (inline)
    if inline_id:
        sql = "SELECT pid from `point` WHERE cid = %s " \
...
@@ -140,73 +60,7 @@ async def pttl_max_new15(cid, start, end, point_id=None, inline_id=None):
    return max_val, max_val_time


async def pttl_max_15min(cid, start, end, point_id=None, inline_id=None):
    """For load distribution the peak load must come from 15-minute data."""
    # Find the points on the given incoming line (inline)
    if inline_id:
        sql = "SELECT pid from `point` WHERE cid = %s " \
              "and inlid_belongedto = %s"
        async with MysqlUtil() as conn:
            point_info = await conn.fetchall(sql=sql, args=(cid, inline_id))
        point_list = [point.get("pid") for point in point_info]
        terms = {"pid": point_list}
    elif point_id == -1:
        # All points selected
        # 1. Find every pid of the factory (point.add_to_company = 1)
        sql = "SELECT pid from `point` WHERE cid = %s"
        async with MysqlUtil() as conn:
            point_info = await conn.fetchall(sql=sql, args=(cid,))
        point_list = [point.get("pid") for point in point_info]
        terms = {"pid": point_list}
    else:
        terms = {"pid": [point_id]}
    # 1. Pick the index according to the time range
    index = POINT_15MIN_INDEX
    date_key = "quarter_time"
    interval = "15m"
    # 2. Build the query body
    eqb = EsQueryBody(terms=terms, start=start, end=end, date_key=date_key)
    query = eqb.query()
    query["aggs"] = {
        "time_column": {
            "date_histogram": {
                "field": date_key,
                "interval": interval,
                "time_zone": "+08:00",
                "format": "yyyy-MM-dd HH:mm"
            },
            "aggs": {
                "pttl_max": {
                    "sum": {
                        "field": "pttl_max"
                    }
                }
            }
        }
    }
    log.info(index + f"====={query}")
    async with EsUtil() as es:
        es_re = await es.search_origin(body=query, index=index)
    if not es_re["aggregations"]["time_column"]["buckets"]:
        return "", ""
    # 2. Return
    es_re = es_re["aggregations"]["time_column"]["buckets"]
    # Maximum demand
    max_val = 0
    max_val_time = ""
    for res in es_re:
        mdp_max_value = res["pttl_max"]["value"]
        if mdp_max_value and mdp_max_value > max_val:
            max_val = mdp_max_value
            max_val_time = res["key_as_string"]
    # Return a different time format depending on the time range
    if max_val_time:
        max_val_time = max_val_time[5:]
    return max_val, max_val_time

-async def pttl_max_15min_new15(cid, start, end, point_id=None, inline_id=None):
+async def load_pttl_max_15min(cid, start, end, point_id=None, inline_id=None):
    if inline_id:
        sql = "SELECT pid from `point` WHERE cid = %s and inlid = %s"
        async with MysqlUtil() as conn:
...
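Both the removed ES functions and the surviving loaders end with the same reduction: walk the per-interval buckets and keep the largest pttl_max together with its timestamp. A self-contained illustration of that loop (the bucket values below are invented):

# Stand-alone illustration of the max-selection loop in pttl_max above; sample data is made up.
buckets = [
    {"key_as_string": "2023-06-01 10:00", "pttl_max": {"value": 120.5}},
    {"key_as_string": "2023-06-01 10:15", "pttl_max": {"value": 180.0}},
    {"key_as_string": "2023-06-01 10:30", "pttl_max": {"value": None}},
]
max_val, max_val_time = 0, ""
for res in buckets:
    mdp_max_value = res["pttl_max"]["value"]
    if mdp_max_value and mdp_max_value > max_val:
        max_val, max_val_time = mdp_max_value, res["key_as_string"]
print(max_val, max_val_time)  # 180.0 2023-06-01 10:15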
unify_api/modules/elec_charge/dao/syncretize_energy_es_dao.py  View file @ 984a952d
from pot_libs.es_util.es_utils import EsUtil
from pot_libs.mysql_util.mysql_util import MysqlUtil
from pot_libs.utils.pendulum_wrapper import my_pendulum
from unify_api.utils.time_format import get_start_end_by_tz_time_new


def convert_es_str(str1: object) -> object:
    """Convert a date string to the ES date-string format."""
    es_date = my_pendulum.from_format(str1, 'YYYY-MM-DD')
    return str(es_date)

async def query_search_kwh_p(cid, start, end, interval):
    query_body = {
        "query": {
            "bool": {
                "filter": [
                    {"term": {"cid": cid}},
                    {
                        "range": {
                            "quarter_time": {
                                "gte": convert_es_str(start),
                                "lte": convert_es_str(end)
                            }
                        }
                    }
                ]
            }
        },
        "aggs": {
            "quarter_time": {
                "date_histogram": {
                    "field": "quarter_time",
                    "interval": interval,
                    "time_zone": "+08:00",
                    "format": "yyyy-MM-dd HH:mm:ss"
                },
                "aggs": {
                    "p": {"stats": {"field": "p"}},
                    "kwh": {"stats": {"field": "kwh"}}
                }
            }
        },
        "sort": [
            {"quarter_time": {"order": "asc"}}
        ]
    }
    async with EsUtil() as es:
        es_re = await es.search_origin(body=query_body,
                                       index="poweriot_company_15min_power")
    return es_re["aggregations"]["quarter_time"]["buckets"]

-async def query_search_kwh_p_new15(cid, start, end):
+async def load_compy_power(cid, start, end):
    start, _ = get_start_end_by_tz_time_new(start)
    _, end = get_start_end_by_tz_time_new(end)
    sql = f"""
...
@@ -75,53 +11,11 @@ async def query_search_kwh_p_new15(cid, start, end):
        order by create_time asc
    """
    async with MysqlUtil() as conn:
        datas = await conn.fetchall(sql=sql, args=(cid, start, end))
    return datas

async def query_spfv_price(cid, start, end):
    query_body = {
        "query": {
            "bool": {
                "filter": [
                    {"term": {"cid": cid}},
                    {
                        "range": {
                            "quarter_time": {
                                "gte": convert_es_str(start),
                                "lte": convert_es_str(end)
                            }
                        }
                    }
                ]
            }
        },
        "aggs": {
            "charge": {"stats": {"field": "charge"}},
            "kwh": {"stats": {"field": "kwh"}}
        }
    }
    async with EsUtil() as es:
        es_re = await es.search_origin(body=query_body,
                                       index="poweriot_company_15min_power")
    return es_re["aggregations"]["charge"]["avg"], \
        es_re["aggregations"]["kwh"]["avg"]

-async def query_spfv_price_new15(cid, start, end):
+async def load_spfv_price(cid, start, end):
    start, _ = get_start_end_by_tz_time_new(start)
    _, end = get_start_end_by_tz_time_new(end)
    sql = f"""
...
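convert_es_str, kept above, turns a plain 'YYYY-MM-DD' string into pendulum's ISO-8601 rendering for the range filter. A rough standard-library equivalent, hedged because the exact output of my_pendulum depends on its configured timezone:

# Approximation of convert_es_str using only the standard library; the real helper goes
# through pot_libs' pendulum wrapper, which may append a timezone offset such as +08:00.
from datetime import datetime


def convert_es_str_approx(str1: str) -> str:
    return datetime.strptime(str1, "%Y-%m-%d").isoformat()


print(convert_es_str_approx("2023-06-01"))  # 2023-06-01T00:00:00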
unify_api/modules/elec_charge/service/syncretize_energy_services.py  View file @ 984a952d

...
@@ -7,10 +7,10 @@ import pandas as pd
from pot_libs.logger import log
from pot_libs.common.components.responses import success_res
from unify_api.modules.elec_charge.dao.syncretize_energy_es_dao import \
-    query_search_kwh_p_new15, query_spfv_price_new15
+    load_compy_power, load_spfv_price
from unify_api.modules.elec_charge.dao.syncretize_energy_dao import \
    get_annual_sunshine_hours, get_p, insert_price_policy_data_dao, \
    inset_algorithm_power_factor_dao, get_elec_price_dao, \
    get_algorithm_power_factor_dao, get_max_demand_by_inlid
from unify_api.modules.elec_charge.components.syncretize_energy_cps import \
    PvEvaluateComputeResp, Optimizecurve, ElectrovalenceResp, \
...
@@ -33,7 +33,7 @@ async def pv_evaluate_service(cid, start, end):
        start_list = start.split("-")
        end_list = end.split("-")
        pendulum_start = pendulum.date(int(start_list[0]),
                                       int(start_list[1]), 1)
        pendulum_end = pendulum.date(int(end_list[0]), int(end_list[1]), 1)
        day_num = pendulum_end.days_in_month
    except:
...
@@ -43,10 +43,9 @@ async def pv_evaluate_service(cid, start, end):
    elif (pendulum_end - pendulum_start).in_months() > 12:
        return success_res(code=4008, msg="日期最多选择12个月")
    else:
        # kwh_datas = await query_search_kwh_p_new15(cid, f"{start}-01",
        #                                            f"{end}-{day_num}", "1h")
-        p_datas = await query_search_kwh_p_new15(cid, f"{start}-01",
-                                                 f"{end}-{day_num}")
+        p_datas = await load_compy_power(cid, f"{start}-01",
+                                         f"{end}-{day_num}")
        if not p_datas:
            return PvEvaluateTwoResp(kwh_slot=[], p_slot=[], p=[], kwh=[],
                                     electrovalence={}, sum_kwh_p="",
...
@@ -66,11 +65,10 @@ async def pv_evaluate_service(cid, start, end):
                kwh_slots[flag].append(num)
            num = data["kwh"]
            flag = create_time[11:13]
    # for data in p_datas:
    #     if data["p"]:
    #         p_slots[create_time[11:16]].append(data["p"])
    for key, value in kwh_slots.items():
        kwh_slots[key] = round(sum(value) / len(value), 2) if value else ""
    # Get the peak time-of-use period
    elec_price = await get_elec_price_dao(cid)
    if not elec_price:
...
@@ -88,7 +86,8 @@ async def pv_evaluate_service(cid, start, end):
                             p_slot=[slot for slot in p_slots],
                             electrovalence=elecs,
                             sum_kwh_p=round(sum_kwh_p, 2),
                             p=[round(sum(p) / len(p), 2) if p else ""
                                for p in p_slots.values()],
                             kwh=list(kwh_slots.values()))
...
@@ -108,7 +107,8 @@ async def pv_evaluate_compute_service(download=None, url=None, **body):
        total_capacity = float(body.get("install_space")) * \
                         float(body.get("area_conversion_ratio"))
        # Plant capacity = roof area * area conversion ratio * capacity per unit area
        invest_capacity = total_capacity * body.get("capacity_per_meter") / 1000
        if not invest_capacity:
            return success_res(code=4008,
                               msg="场地面积/面积折算系数/单位面积容量不能为0")
    except:
...
@@ -136,9 +136,9 @@ async def pv_evaluate_compute_service(download=None, url=None, **body):
        {"quarter_time": list(p_slots.keys()), "pv_curve": df_pv_curve},
        columns=["quarter_time", "pv_curve"])
    # Fetch the 15-minute energy and load data
-    datas = await query_search_kwh_p_new15(body.get("cid"),
-                                           f"{body.get('start')}-01",
-                                           f"{body.get('end')}-{day_num}")
+    datas = await load_compy_power(body.get("cid"), f"{body.get('start')}-01",
+                                   f"{body.get('end')}-{day_num}")
    if not datas:
        return success_res(code=4008, msg="未找到数据")
    for data in datas:
...
@@ -146,21 +146,21 @@ async def pv_evaluate_compute_service(download=None, url=None, **body):
        if data["p"]:
            p_slots[create_time[11:19]].append(data["p"])
    for index, value in p_slots.items():
        p_slots[index] = sum(value) / len(value) if value else None
    # Load curve df_load
    df_load = pd.DataFrame({"quarter_time": list(p_slots.keys()),
                            "load_curve": list(p_slots.values())},
                           columns=["quarter_time", "load_curve"])
    # Average price over the selected period
-    charge_price, kwh_price = await query_spfv_price_new15(
-        body.get("cid"), f"{body.get('start')}-01", f"{body.get('end')}-{day_num}")
+    charge_price, kwh_price = await load_spfv_price(
+        body.get("cid"), f"{body.get('start')}-01", f"{body.get('end')}-{day_num}")
    spfv_price = charge_price / kwh_price if charge_price and kwh_price else 0
    pv_system = {
        "user_type": "工商业",  # building type
        "install_space": body.get("install_space"),  # roof area (m2)
        "area_conversion_ratio": body.get("area_conversion_ratio"),  # area conversion ratio
        "capacity_per_meter": body.get("capacity_per_meter"),  # capacity per unit area
        "self_use_ratio": body.get("self_use_ratio"),  # self-consumption ratio
        "efficiency": body.get("efficiency"),  # generation efficiency
...
@@ -183,7 +183,9 @@ async def pv_evaluate_compute_service(download=None, url=None, **body):
    obj = PvEvaluateTool(pv_system, price, invest_capacity, df_load, df_pv)
    obj.output()
    # Evaluation table
    evaluate_table = (obj.evaluate_table
                      .where(obj.evaluate_table.notnull(), None)).round(2)
    # Download
    if download:
        company = await select_cname_by_cid(body.get("cid"))
...
@@ -253,8 +255,8 @@ async def ess_evaluate_service(cid, start, end, work_day):
        elec_list = [i for i in re.findall("p*", elec["quarters"]) if i]
        rule = 2 if len(elec_list) > 1 else 1
-    p_datas = await query_search_kwh_p_new15(cid, f"{start}-01",
-                                             f"{end}-{day_num}")
+    p_datas = await load_compy_power(cid, f"{start}-01", f"{end}-{day_num}")
    if not p_datas:
        return PvEvaluateTwoResp(kwh_slot=[], p_slot=[], p=[], kwh=[],
                                 electrovalence={}, sum_kwh_p="",
...
@@ -331,7 +333,8 @@ async def ess_evaluate_service(cid, start, end, work_day):
                             electrovalence=elecs,
                             sum_kwh_p=round(sum_kwh_p, 2),
                             sum_kwh_s=sum_kwh_s,
                             p=[round(sum(p) / len(p), 2) if p else ""
                                for p in p_slots.values()],
                             kwh=list(kwh_slots.values()))
...
@@ -377,9 +380,9 @@ async def ess_evaluate_compute_service(download=None, url=None, **body):
            max_demand_pmax += demand[1]
    max_demand_var = {"flag": max_demand_flag, "pmax": max_demand_pmax}
    # Fetch the 15-minute energy and load data
-    datas = await query_search_kwh_p_new15(body.get("cid"),
-                                           f"{body.get('start')}-01",
-                                           f"{body.get('end')}-{day_num}")
+    datas = await load_compy_power(body.get("cid"), f"{body.get('start')}-01",
+                                   f"{body.get('end')}-{day_num}")
    if not datas:
        return success_res(code=4008, msg="未找到数据")
    p_slots = {"%02d:%02d:00" % (i, j): [] for i in range(24) for j in
...
@@ -388,7 +391,8 @@ async def ess_evaluate_compute_service(download=None, url=None, **body):
        # 1 = all, 2 = workdays, 3 = non-workdays
        if body.get("work_day") == "2":
            create_time = data["create_time"].strftime("%Y-%m-%d %H:%M:%S")
            if ChineseCalendar(create_time[:10]).is_workday() and data["p"]:
                p_slots[create_time[11:19]].append(data["p"])
        elif body.get("work_day") == "3":
...
@@ -400,7 +404,7 @@ async def ess_evaluate_compute_service(download=None, url=None, **body):
            if data["p"]:
                p_slots[create_time[11:19]].append(data["p"])
    for index, value in p_slots.items():
        p_slots[index] = sum(value) / len(value) if value else None
    # Typical load curve df_curve
    df_curve = pd.DataFrame(
        {"quarter_time": list(p_slots.keys()),
...
@@ -409,11 +413,11 @@ async def ess_evaluate_compute_service(download=None, url=None, **body):
    df_curve.loc[:, "quarter_time"] = pd.to_datetime(df_curve.loc[:, "quarter_time"])
    if elec_prices.get("price_s") and section_time_range.get("s"):
        peak_valley_price = elec_prices["price_s"] - elec_prices["price_v"]
        peak_flat_price = elec_prices["price_s"] - elec_prices["price_f"]
    else:
        peak_valley_price = elec_prices["price_p"] - elec_prices["price_v"]
        peak_flat_price = elec_prices["price_p"] - elec_prices["price_f"]
    log.info(f"cid:{body.get('cid')}, 峰谷价差:{peak_valley_price}, "
             f"峰平价差:{peak_flat_price}")
    price = {
...
@@ -445,7 +449,8 @@ async def ess_evaluate_compute_service(download=None, url=None, **body):
"DOD"
:
body
.
get
(
"DOD"
),
# 放电深度
"decay_rate"
:
body
.
get
(
"decay_rate"
),
# 衰减率
# 年运维费用占静态投资额比例
"maintenance_ratio_per_year"
:
body
.
get
(
"maintenance_ratio_per_year"
),
"maintenance_ratio_per_year"
:
body
.
get
(
"maintenance_ratio_per_year"
),
"year_use_days"
:
body
.
get
(
"year_use_days"
),
# 一年可利用时间
"evaluate_year"
:
evaluate_year
,
# 评估年限
"subsidy_year"
:
body
.
get
(
"subsidy_year"
),
# 补贴年限
...
...
@@ -465,7 +470,8 @@ async def ess_evaluate_compute_service(download=None, url=None, **body):
        table_name = f"{company['shortname']}_{body.get('start')}" \
                     f"_{body.get('end')}储能测算表"
        return await dataframe_excl_download(evaluate_table, table_name)
    curve = (obj.opt_curve
             .where(obj.opt_curve.notnull(), None)).round(2).reset_index()
    opt_curve = OptCurve(slot=curve["quarter_time"].values.tolist(),
                         load_curve=curve["load_curve"].values.tolist(),
...
@@ -512,7 +518,8 @@ async def electrovalence_setting_service(cid, price_md, price_tc, std_cos,
        start = slot[0].split(":")
        end = slot[1].split(":")
        if int(end[0]) < int(start[0]) or \
                (int(end[0]) == int(start[0]) and int(end[1]) <= int(start[1])):
            return success_res(code=400, msg="结束时间需要大于开始时间")
        fina = int(end[0]) + 1 if end[1] != "00" else int(end[0])
        for index, num in enumerate(range(int(start[0]), fina)):
...
@@ -636,4 +643,4 @@ def get_section_time_slot(elecs):
            end_h += 1
        for i in range(start_h, end_h):
            d[name].append("%02d:00" % i)
-    return d
\ No newline at end of file
+    return d
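Several hunks above repeat one pattern: 15-minute samples are bucketed by their time-of-day slot and each bucket is then averaged (None for empty slots). A self-contained sketch of that pattern; the quarter-hour steps and the sample rows are assumptions for illustration, since the slot dictionary is only partially visible in the diff:

# Bucket samples by HH:MM:SS slot and average each bucket, mirroring the p_slots handling
# above; the (0, 15, 30, 45) steps and the input rows are invented for this example.
rows = [
    {"create_time": "2023-06-01 00:15:00", "p": 10.0},
    {"create_time": "2023-06-02 00:15:00", "p": 14.0},
    {"create_time": "2023-06-01 00:30:00", "p": 8.0},
]
p_slots = {"%02d:%02d:00" % (i, j): [] for i in range(24) for j in (0, 15, 30, 45)}
for data in rows:
    if data["p"]:
        p_slots[data["create_time"][11:19]].append(data["p"])
for index, value in p_slots.items():
    p_slots[index] = sum(value) / len(value) if value else None
print(p_slots["00:15:00"], p_slots["00:30:00"], p_slots["00:45:00"])  # 12.0 8.0 None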
unify_api/modules/elec_charge/views/elec_statistics.py  View file @ 984a952d

...
@@ -2,8 +2,7 @@ from unify_api.modules.elec_charge.common.utils import \
    power_charge, max_min_time, power_charge_new15
from pot_libs.sanic_api import summary
from pot_libs.utils.pendulum_wrapper import my_pendulum
-from unify_api.modules.common.procedures.pttl_max import pttl_max, \
-    pttl_max_new15
+from unify_api.modules.common.procedures.pttl_max import load_pttl_max
from unify_api.modules.elec_charge.components.elec_statistics_cps import \
    PcStatiReq, PcStatiResp, MaxpReq, MaxpResp, PcmResp
from unify_api.utils.common_utils import round_2
...
@@ -31,7 +30,7 @@ async def post_max_p(req, body: MaxpReq) -> MaxpResp:
    point_id = body.point_id
    start = body.start
    end = body.end
-    max_val, max_val_time = await pttl_max_new15(cid, start, end, point_id=point_id)
+    max_val, max_val_time = await load_pttl_max(cid, start, end, point_id=point_id)
    return MaxpResp(maxp=max_val, date_time=max_val_time)
...
@@ -121,7 +120,7 @@ async def power_charge_min_max_service_new15(cid, pid, start, end, date_type):
    min_kwh = {"value": "", "time": ""}
    max_charge = {"value": "", "time": ""}
    min_charge = {"value": "", "time": ""}
-    max_val, max_val_time = await pttl_max_new15(cid, start, end, point_id=pid)
+    max_val, max_val_time = await load_pttl_max(cid, start, end, point_id=pid)
    max_p["value"] = round_2(max_val)
    max_p["time"] = max_val_time
    # 2. For daily statistics, also add today/yesterday load curves, one point per 15 min
...
unify_api/modules/home_page/procedures/count_info_pds.py  View file @ 984a952d

...
@@ -20,9 +20,7 @@ from unify_api.modules.common.dao.common_dao import monitor_point_join
from unify_api.modules.common.procedures.common_utils import get_electric_index
from unify_api.modules.common.procedures.points import proxy_points, \
    get_points_num
-from unify_api.modules.common.procedures.pttl_max import pttl_max, \
-    pttl_max_new15
from unify_api.modules.electric.views.electric import METERDATA_CURRENT_KEY
+from unify_api.modules.common.procedures.pttl_max import load_pttl_max
from unify_api.modules.home_page.components.count_info_cps import (
    MaxResidualCurrent, ElectricInfo,
...
@@ -308,7 +306,7 @@ async def power_count_info(cid):
    start_time = (now - timedelta(30)).strftime("%Y-%m-%d 00:00:00")
    end_time = now.strftime("%Y-%m-%d %H:%M:%S")
-    max_30d_load, _time = await pttl_max_new15(cid, start_time, end_time, -1)
+    max_30d_load, _time = await load_pttl_max(cid, start_time, end_time, -1)
    cur_load = await real_time_load(cid)
    return round_2(cur_load), round_2(max_30d_load)
...
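power_count_info builds its 30-day window with plain datetime arithmetic before handing it to load_pttl_max. The window computation in isolation, assuming now is a standard datetime as the strftime calls suggest:

# Same 30-day window construction as power_count_info above, shown stand-alone.
from datetime import datetime, timedelta

now = datetime.now()
start_time = (now - timedelta(30)).strftime("%Y-%m-%d 00:00:00")
end_time = now.strftime("%Y-%m-%d %H:%M:%S")
print(start_time, end_time)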
unify_api/modules/load_analysis/views/load_distribution.py  View file @ 984a952d

...
@@ -3,8 +3,7 @@ import re
from unify_api.utils.common_utils import round_2
from pot_libs.mysql_util import mysql_util
from pot_libs.sanic_api import summary
-from unify_api.modules.common.procedures.pttl_max import pttl_max, \
-    pttl_max_15min, pttl_max_15min_new15
+from unify_api.modules.common.procedures.pttl_max import load_pttl_max_15min
from unify_api.modules.load_analysis.components.load_distribution_cps import \
    DistributionReq, DistributionResp, LrBins, MaxpResp
...
@@ -53,11 +52,9 @@ async def post_load_distribution(req,
    # Peak-valley difference
    peak_valley = result["peak_valley"]
    # Peak load
    # max_val, max_val_time = await pttl_max_15min(cid=cid, start=start,
    #                                              end=end, inline_id=inline_id)
-    max_val, max_val_time = await pttl_max_15min_new15(cid=cid, start=start,
-                                                       end=end,
-                                                       inline_id=inline_id)
+    max_val, max_val_time = await load_pttl_max_15min(cid=cid, start=start,
+                                                      end=end,
+                                                      inline_id=inline_id)
    max_p = MaxpResp(maxp=round_2(max_val), date_time=max_val_time)
    return DistributionResp(base_load=base_load,
                            mean_load_rate=mean_load_rate,
                            peak_valley=peak_valley,
                            max_load=max_p,
...