chaonan / unify_api2
Commit d85b4ef3 authored Apr 18, 2023 by lcn
Fix bug
parent b26c194c
Showing 3 changed files with 94 additions and 188 deletions (+94 -188):

unify_api/modules/elec_charge/dao/syncretize_energy_dao.py (+2 -2)
unify_api/modules/electric/service/electric_service.py (+1 -1)
unify_api/modules/energy_optimize/service/pv_distributed_photovoltaic.py (+91 -185)
unify_api/modules/elec_charge/dao/syncretize_energy_dao.py
@@ -28,10 +28,10 @@ async def get_elec_price_dao(cid):
async def get_max_demand_by_inlid(inlids):
-    sql = """SELECT a.has_space,b.related_inlids FROM
+    sql = """SELECT '[]' as has_space,b.inlid as related_inlids FROM
    `algo_md_space_analysis_result` a
    LEFT JOIN algo_md_space_analysis_unit b on a.space_analysis_id=b.id
-    WHERE b.related_inlids in %s ORDER BY a.create_time DESC LIMIT %s"""
+    WHERE b.inlid in %s ORDER BY a.month DESC LIMIT %s"""
    async with MysqlUtil() as conn:
        spaces = await conn.fetchall(sql, args=(inlids, len(inlids)))
    return spaces
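For context, a minimal usage sketch of the rewritten DAO (the inlids values and the asyncio wrapper are hypothetical, and it assumes the unify_api package is importable and that MysqlUtil.fetchall returns a list of dicts, as the surrounding code suggests):

import asyncio

from unify_api.modules.elec_charge.dao.syncretize_energy_dao import \
    get_max_demand_by_inlid


async def main():
    # Hypothetical inline ids; the query now filters on b.inlid and orders
    # by a.month instead of b.related_inlids / a.create_time.
    inlids = ("inl_001", "inl_002")
    spaces = await get_max_demand_by_inlid(inlids)
    for row in spaces:
        print(row["has_space"], row["related_inlids"])


asyncio.run(main())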
unify_api/modules/electric/service/electric_service.py
@@ -888,7 +888,7 @@ async def elec_current_service_new15(point_id):
    url = f"{SETTING.stb_url}db_electric?tz=Asia/Shanghai"
    sql = f"select last_row(*) from mt{mtid}_ele where pid={point_id}"
    is_succ, results = await get_td_engine_data(url, sql)
-    if not is_succ or not results:
+    if not is_succ or not results or results.get("code") > 0:
        return '', {}
    head = parse_td_columns(results)
    if not results["data"]:
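The only change here is the guard on the TDengine REST response. A standalone sketch of that condition (the sample payloads are invented; real ones come from get_td_engine_data):

def should_bail(is_succ, results):
    # Mirrors the new check: give up when the request failed, the body is
    # empty, or the payload carries a non-zero TDengine error code.
    return not is_succ or not results or results.get("code") > 0


print(should_bail(False, {}))                           # True: request failed
print(should_bail(True, {"code": 534, "desc": "err"}))  # True: engine error
print(should_bail(True, {"code": 0, "data": [[1.2]]}))  # False: usable result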
unify_api/modules/energy_optimize/service/pv_distributed_photovoltaic.py
@@ -21,14 +21,14 @@ from unify_api.modules.energy_optimize.service.pv_optimation_tool import \
class PhotovoltaicOptimize(object):
    def __init__(self, inlid):
        self._inlid = inlid

    async def calc_inline(self, pv_params):
        rlt = {'rlt_flag': True}
        inl_info = await self._get_inline_info()
        inline_vc = inl_info['inline_vc']
        cid = inl_info['cid']
        city = await self._get_company_city(cid)
        df_pv = await self._construct_pv_curve(city)  # construct df_pv
        if len(df_pv) == 0:
            rlt['rlt_flag'] = False
@@ -39,12 +39,13 @@ class PhotovoltaicOptimize(object):
            rlt['rlt_flag'] = False
            rlt['message'] = '暂无'
            return rlt
        pp = await self._get_company_price_policy(cid)
        pp_info_d = PricePolicyHelper.map_price_policy(pp, inline_vc,
                                                       max_dt.int_timestamp)
        time_str_d = PricePolicyHelper.quarter_chars_2_time_str(
            pp_info_d['quarters'])
        # construct pv_system
        price_type = pp_info_d['price_type']
        pv_system = await self._construct_pv_system(pv_params, price_type)
@@ -57,25 +58,25 @@ class PhotovoltaicOptimize(object):
        inline_var = await self._construct_inline_var(tc_runtime)
        # construct df_load
        df_load = await self._construct_load_curve(max_dt)
        # logger.info('pv_system: %s', pv_system)
        # logger.info('price: %s', price)
        # logger.info('env_benifit: %s', env_benifit)
        # logger.info('inline_var: %s', inline_var)
        # logger.info('df_load: %s', df_load)
        # logger.info('df_pv: %s', df_pv)
        pv_ot = PvOptimizationTool(pv_system, price, env_benifit, df_load,
                                   df_pv, inline_var)
        pv_ot.output()
        # assemble return value
        rlt['install_cap'] = self._assemble_install_cap(pv_ot)
        rlt['invest_evaluate'] = self._assemble_invest_evaluate(pv_ot)
        rlt['opt_analysis'] = pv_ot.opt_analysis
        rlt['opt_curve'] = self._assemble_opt_curve(pv_ot)
        return rlt

    def _assemble_install_cap(self, pv_ot):
        install_cap = {'capacity': pv_ot.invest_capacity["capacity"],
@@ -84,7 +85,7 @@ class PhotovoltaicOptimize(object):
                       'first_year_ttl_kwh':
                           pv_ot.invest_capacity["first_year_ttl_kwh"]}
        return install_cap

    def _assemble_invest_evaluate(self, pv_ot):
        cost_per_kwh = (pv_ot.price["rmb_per_wp"] -
                        pv_ot.price["first_install_subsidy"])
@@ -94,7 +95,7 @@ class PhotovoltaicOptimize(object):
        first_year_income = invest_income["first_year_income"]
        first_year_income_rate = invest_income["first_year_income_rate"]
        invest_income_year = invest_income["invest_income_year"]
        i_and_r = {'user_type': pv_ot.pv_system["user_type"],
                   'ttl_invest': pv_ot.invest_capacity["ttl_invest"],
@@ -114,7 +115,7 @@ class PhotovoltaicOptimize(object):
                   'first_year_income_rate': first_year_income_rate,
                   'invest_income_year': invest_income_year}
        env_benifit = pv_ot.invest_evaluate["env_benifit_per_year"]
        c_footprint = {'families': env_benifit['one_family_kwh'],
@@ -126,10 +127,10 @@ class PhotovoltaicOptimize(object):
                       'smoke': env_benifit['Smoke'],
                       'H2O': env_benifit['H2O']}
        invest_evaluate = {'i_and_r': i_and_r,
                           'carbon_footprint': c_footprint}
        return invest_evaluate

    def _assemble_opt_curve(self, pv_ot):
        rlt = []
        for idx, row in pv_ot.opt_curve.iterrows():
@@ -140,7 +141,7 @@ class PhotovoltaicOptimize(object):
            tmpd['pv_curve'] = row['pv_curve']
            rlt.append(tmpd)
        return rlt

    async def _construct_pv_system(self, pv_params, price_type):
        area = pv_params['install_space']
        # ratio fixed, convert to decimal, web backend just pass us a
@@ -151,10 +152,10 @@ class PhotovoltaicOptimize(object):
               "where i.inlid=%s and i.cid=c.cid and "
               "c.city=pv.city;")
        async with MysqlUtil() as conn:
-            hours = await conn.fetchone(sql, (self._inlid,))
-        hours = hours.get("peak_sunshine_hours")
+            hours = await conn.fetchone(sql, (self._inlid,)) or {}
+        hours = hours.get("peak_sunshine_hours") or 0
        annual_hours = hours * 365
        # peak_sunshine_hours means annual_peak_sunshine_hours, the name
        # in algorithm is misleading
        pv_system = {
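A small standalone sketch of the defensive defaults added above: when no row matches the inline id, a fetchone-style call returns None, and the chained "or" fallbacks keep the calculation at 0 instead of raising AttributeError (the row value is invented):

row = None  # what a fetchone-style call yields when nothing matches
hours = (row or {}).get("peak_sunshine_hours") or 0
annual_hours = hours * 365
print(annual_hours)  # 0 for an unknown inline, instead of a crash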
@@ -169,7 +170,7 @@ class PhotovoltaicOptimize(object):
            "peak_sunshine_hours": annual_hours  # annual peak sunshine hours
        }
        return pv_system

    def _construct_price(self, pv_params, pp_info_d, time_str_d):
        cons_p = pv_params['rmb_per_wp']
        user_p = pv_params['sel_use_per_kwh']
@@ -205,7 +206,7 @@ class PhotovoltaicOptimize(object):
        sct = self._construct_section('v', pp_info_d, time_str_d)
        sfpv_price['section_v'] = sct
        return price_d

    def _construct_env_benifit(self):
        env_benifit_param = {
            "one_family_kwh": 3600,  # one household uses about 3600 kWh per year
@@ -218,147 +219,86 @@ class PhotovoltaicOptimize(object):
            "tree": 18.3  # one tree absorbs about 18.3 kg of CO2 per year
        }
        return env_benifit_param

    def _construct_section(self, p_char, pp_info_d, time_str_d):
        """ contruct section_x for price_d."""
        section = {'price': pp_info_d['price_' + p_char]}
        time_range_str = ';'.join(time_str_d[p_char])
        section['time_range'] = time_range_str
        return section
-    def _build_kwh_charge_sum_lastest_30(self, p_char):
+    async def _build_kwh_charge_sum_lastest_30(self, p_char):
        """ build es query sentance for get kwh sum and charge sum
        within lastest 30 days for specified p_char.
        """
+        sql = f"""
+            select sum(kwh) kwh,sum(charge) charge from inline_15min_power
+            where inlid = %s and spfv = %s and create_time >= %s and create_time
+            < %s
+        """
        dt = pendulum.now()
        dt_1_month_ago = dt.subtract(days=30)
-        q = {
-            "size": 0,
-            "query": {
-                "bool": {
-                    "must": [
-                        {"term": {"inlid": {"value": self._inlid}}},
-                        {"term": {"spfv": {"value": p_char}}},
-                        {"range": {"quarter_time": {"gte": str(dt_1_month_ago),
-                                                    "lt": str(dt)}}}
-                    ]
-                }
-            },
-            "aggs": {
-                "kwh": {"sum": {"field": "kwh"}},
-                "charge": {"sum": {"field": "charge"}}
-            }
-        }
-        return q
+        start_time = dt_1_month_ago.format("YYYY-MM-DD HH:mm:ss")
+        end_time = dt.format("YYYY-MM-DD HH:mm:ss")
+        async with MysqlUtil() as conn:
+            result = await conn.fetchone(sql, args=(self._inlid, p_char,
+                                                    start_time, end_time))
+        return result or {}
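One detail behind the new aggregate query: SUM() over an empty window yields NULL, so the fetched row can exist with None values, which is why the caller below falls back with "or 0". A standalone sketch using sqlite3 as a stand-in for MySQL (table contents and ids are invented):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table inline_15min_power "
             "(inlid text, spfv text, kwh real, charge real, create_time text)")
row = conn.execute(
    "select sum(kwh) as kwh, sum(charge) as charge from inline_15min_power "
    "where inlid = ? and spfv = ?", ("inl_001", "s")).fetchone()
print(row)  # (None, None): no rows matched, so both sums are NULL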
    async def _construct_inline_var(self, inline_tc):
        inline_var = {'inline_capacity': inline_tc}
-        q = self._build_kwh_charge_sum_lastest_30("s")
+        result = await self._build_kwh_charge_sum_lastest_30("s")
        # search_rlt = self._es.search(inline_15min_power_esindex, q)
-        async with EsUtil() as es:
-            search_rlt = await es.search_origin(body=q,
-                                                index=INLINE_15MIN_POWER_ESINDEX)
-        charge_s = search_rlt['aggregations']['charge']['value']
-        kwh_s = search_rlt['aggregations']['kwh']['value']
-        q = self._build_kwh_charge_sum_lastest_30("p")
+        charge_s = result.get("charge") or 0
+        kwh_s = result.get("kwh") or 0
+        result = await self._build_kwh_charge_sum_lastest_30("p")
        # search_rlt = self._es.search(inline_15min_power_esindex, q)
-        async with EsUtil() as es:
-            search_rlt = await es.search_origin(body=q,
-                                                index=INLINE_15MIN_POWER_ESINDEX)
-        charge_p = search_rlt['aggregations']['charge']['value']
-        kwh_p = search_rlt['aggregations']['kwh']['value']
+        charge_p = result.get("charge") or 0
+        kwh_p = result.get("kwh") or 0
        # add 's' and 'p', because algorithm needs these
        charge_sp = charge_s + charge_p
        kwh_sp = kwh_s + kwh_p
        inline_var['peak_charge'] = charge_sp
        inline_var['peak_kwh'] = kwh_sp
-        q = self._build_kwh_charge_sum_lastest_30("f")
+        result = await self._build_kwh_charge_sum_lastest_30("f")
        # search_rlt = self._es.search(inline_15min_power_esindex, q)
-        async with EsUtil() as es:
-            search_rlt = await es.search_origin(body=q,
-                                                index=INLINE_15MIN_POWER_ESINDEX)
-        charge_f = search_rlt['aggregations']['charge']['value']
-        kwh_f = search_rlt['aggregations']['kwh']['value']
+        charge_f = result.get("charge") or 0
+        kwh_f = result.get("kwh") or 0
        inline_var['flat_charge'] = charge_f
        inline_var['flat_kwh'] = kwh_f
        return inline_var
-    def _build_load_curve(self, start_dt):
+    async def _build_load_curve(self, start_dt):
        end_dt = start_dt.add(days=1)
-        q = {
-            "size": 100,
-            "_source": ["quarter_time", "p"],
-            "query": {
-                "bool": {
-                    "must": [
-                        {"term": {"inlid": {"value": self._inlid}}},
-                        {"range": {"quarter_time": {"gte": str(start_dt),
-                                                    "lt": str(end_dt)}}}
-                    ]
-                }
-            },
-            "sort": [
-                {"quarter_time": {"order": "asc"}}
-            ]
-        }
-        return q
+        start_time = start_dt.format("YYYY-MM-DD HH:mm:ss")
+        end_time = end_dt.format("YYYY-MM-DD HH:mm:ss")
+        sql = f"""
+            select create_time,p from inline_15min_power
+            where inlid = %s and create_time >= %s and create_time < %s
+            order by create_time asc limit 100
+        """
+        async with MysqlUtil() as conn:
+            results = await conn.fetchall(sql, args=(self._inlid, start_time,
+                                                     end_time))
+        return results or []
    async def _construct_load_curve(self, start_dt):
-        q = self._build_load_curve(start_dt)
-        # search_rlt = self._es.search(inline_15min_power_esindex, q)
-        async with EsUtil() as es:
-            search_rlt = await es.search_origin(body=q,
-                                                index=INLINE_15MIN_POWER_ESINDEX)
-        hits_list = search_rlt['hits']['hits']
+        hits_list = await self._build_load_curve(start_dt)
+        # hits_list is already sorted by quarter_time asc
        kw_list = []
        for item in hits_list:
-            src_d = item['_source']
-            qrt_str = src_d['quarter_time']
-            dt = pendulum.from_format(qrt_str, 'YYYY-MM-DDTHH:mm:ssZ',
-                                      tz='Asia/Shanghai')
-            qrt_dt = datetime.datetime(year=dt.year, month=dt.month,
-                                       day=dt.day, hour=dt.hour,
-                                       minute=dt.minute, second=dt.second)
-            kw_list.append({'quarter_time': qrt_dt, 'load_curve': src_d['p']})
+            kw_list.append({'quarter_time': item.get('create_time'),
+                            'load_curve': item.get('p')})
        df = pd.DataFrame(kw_list)
        return df
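A minimal sketch (values invented) of the records _construct_load_curve now hands to pandas: the raw MySQL create_time plus the active power p, one record per 15-minute point:

import datetime

import pandas as pd

kw_list = [
    {"quarter_time": datetime.datetime(2023, 4, 17, 0, 0), "load_curve": 120.5},
    {"quarter_time": datetime.datetime(2023, 4, 17, 0, 15), "load_curve": 118.0},
]
df = pd.DataFrame(kw_list)
print(df)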
    async def _construct_pv_curve(self, city):
        sql = "select hour, p from algo_distributed_pv where city=%s " \
              "order by hour asc"
@@ -374,7 +314,7 @@ class PhotovoltaicOptimize(object):
            pv_list.append({'quarter_time': qrt_dt, 'pv_curve': item['p']})
        df = pd.DataFrame(pv_list)
        return df

    async def _get_inline_info(self):
        """ get inline_vc, tc_runtime, cid from redis.
        :return: a dict
@@ -387,7 +327,7 @@ class PhotovoltaicOptimize(object):
               'tc_runtime': info['tc_runtime'],
               'cid': info['cid']}
        return rlt

    async def _get_company_price_policy(self, cid):
        result = AutoDic()
        sql = 'SELECT * FROM price_policy where cid = %s'
@@ -397,7 +337,7 @@ class PhotovoltaicOptimize(object):
            result[str(policy['inline_vc'])][str(policy['start_month'])][
                policy['time_range']] = policy
        return result

    async def _get_company_city(self, cid):
        sql = "SELECT city from company where cid = %s"
        async with MysqlUtil() as conn:
@@ -405,60 +345,26 @@ class PhotovoltaicOptimize(object):
        # company_j = self._r_cache.hget(company_hashname, str(cid))
        # c_info = json.loads(company_j)
        return c_info['city']

-    def _build_max_kwh_day(self):
-        """ build es query sentance for find max kwh day."""
-        dt = pendulum.now()
-        dt_half_year_ago = dt.subtract(months=6)
-        q = {
-            "size": 1,
-            "query": {
-                "bool": {
-                    "must": [
-                        {"term": {"inlid": {"value": self._inlid}}},
-                        {"range": {"day": {"gte": str(dt_half_year_ago),
-                                           "lt": str(dt)}}}
-                    ]
-                }
-            },
-            "sort": [
-                {"kwh": {"order": "desc"}}
-            ]
-        }
-        return q
-
    async def _find_kwh_max_day(self):
        """ find the max kwh day in latest 6 months.
        :return: a dt object, or None if no doc
        """
+        sql = f"""
+            select create_time from inline_1day_power
+            where inlid = %s and create_time >= %s and create_time < %s
+            order by kwh desc limit 1;
+        """
-        rlt = None
-        q = self._build_max_kwh_day()
-        # search_rlt = self._es.search(inline_1day_power_esindex, q)
-        async with EsUtil() as es:
-            search_rlt = await es.search_origin(body=q,
-                                                index=INLINE_1DAY_POWER_ESINDEX)
-        hits_list = search_rlt['hits']['hits']
-        try:
-            max_day_doc = hits_list[0]['_source']
-        except IndexError:
-            pass
-        else:
-            day_str = max_day_doc['day']
-            rlt = pendulum.from_format(day_str, 'YYYY-MM-DDTHH:mm:ssZ',
-                                       tz='Asia/Shanghai')
-        return rlt
+        dt = pendulum.now()
+        dt_half_year_ago = dt.subtract(months=6)
+        start_time = dt_half_year_ago.format("YYYY-MM-DD HH:mm:ss")
+        end_time = dt.format("YYYY-MM-DD HH:mm:ss")
+        async with MysqlUtil() as conn:
+            result = await conn.fetchone(sql, args=(self._inlid, start_time,
+                                                    end_time))
+        if not result:
+            return None
+        max_dt = result.get("create_time").strftime("%Y-%m-%d %H:%M:%S")
+        return pendulum.parse(max_dt)
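A standalone sketch (the datetime value is invented) of the conversion at the end of _find_kwh_max_day: the driver returns create_time as a datetime, which is re-formatted to a string and parsed into a pendulum DateTime so that max_dt.int_timestamp is available to calc_inline:

import datetime

import pendulum

create_time = datetime.datetime(2023, 1, 15, 0, 0, 0)  # as returned by the MySQL driver
max_dt = pendulum.parse(create_time.strftime("%Y-%m-%d %H:%M:%S"))
print(max_dt.int_timestamp)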
async def pv_out_result(inlid, params):