Examples¶
Bid stack equivalent market¶
This example implements a one region market that mirrors the ‘bid stack’ model of an electricity market. Under the bid stack model, generators are dispatched according to their bid prices, from cheapest to most expensive, until all demand is satisfied. No loss factors, ramping constraints or other factors are considered.
import pandas as pd
from nempy import markets
# Volume of each bid, number of bands must equal number of bands in price_bids.
volume_bids = pd.DataFrame({
'unit': ['A', 'B'],
'1': [20.0, 50.0], # MW
'2': [20.0, 30.0], # MW
'3': [5.0, 10.0] # More bid bands could be added.
})
# Price of each bid, bids must be monotonically increasing.
price_bids = pd.DataFrame({
'unit': ['A', 'B'],
'1': [50.0, 50.0], # $/MW
'2': [60.0, 55.0], # $/MW
'3': [100.0, 80.0] # . . .
})
# Other unit properties
unit_info = pd.DataFrame({
'unit': ['A', 'B'],
'region': ['NSW', 'NSW']
})
# The demand in the region(s) being dispatched.
demand = pd.DataFrame({
'region': ['NSW'],
'demand': [120.0] # MW
})
# Create the market model
market = markets.SpotMarket(unit_info=unit_info, market_regions=['NSW'])
market.set_unit_volume_bids(volume_bids)
market.set_unit_price_bids(price_bids)
market.set_demand_constraints(demand)
# Calculate dispatch and pricing
market.dispatch()
# Return the total dispatch of each unit in MW.
print(market.get_unit_dispatch())
# unit service dispatch
# 0 A energy 40.0
# 1 B energy 80.0
# Return the price of energy in each region.
print(market.get_energy_prices())
# region price
# 0 NSW 60.0
Unit loss factors, capacities and ramp rates¶
In this example units are given loss factors, capacity values and ramp rates.
import pandas as pd
from nempy import markets
# Volume of each bid, number of bands must equal number of bands in price_bids.
volume_bids = pd.DataFrame({
'unit': ['A', 'B'],
'1': [20.0, 50.0], # MW
'2': [20.0, 30.0], # MW
'3': [5.0, 10.0] # More bid bands could be added.
})
# Price of each bid, bids must be monotonically increasing.
price_bids = pd.DataFrame({
'unit': ['A', 'B'],
'1': [50.0, 50.0], # $/MW
'2': [60.0, 55.0], # $/MW
'3': [100.0, 80.0] # . . .
})
# Factors limiting unit output.
unit_limits = pd.DataFrame({
'unit': ['A', 'B'],
'initial_output': [55.0, 90.0], # MW
'capacity': [55.0, 90.0], # MW
'ramp_up_rate': [1000.0, 1500.0], # MW/h
'ramp_down_rate': [1000.0, 1500.0] # MW/h
})
# Other unit properties including loss factors.
unit_info = pd.DataFrame({
'unit': ['A', 'B'],
'region': ['NSW', 'NSW'],
'loss_factor': [0.9, 0.95]
})
# The demand in the region(s) being dispatched.
demand = pd.DataFrame({
'region': ['NSW'],
'demand': [100.0] # MW
})
# Create the market model
market = markets.SpotMarket(unit_info=unit_info,
market_regions=['NSW'])
market.set_unit_volume_bids(volume_bids)
market.set_unit_price_bids(price_bids)
market.set_unit_bid_capacity_constraints(
unit_limits.loc[:, ['unit', 'capacity']])
market.set_unit_ramp_up_constraints(
unit_limits.loc[:, ['unit', 'initial_output', 'ramp_up_rate']])
market.set_unit_ramp_down_constraints(
unit_limits.loc[:, ['unit', 'initial_output', 'ramp_down_rate']])
market.set_demand_constraints(demand)
# Calculate dispatch and pricing
market.dispatch()
# Return the total dispatch of each unit in MW.
print(market.get_unit_dispatch())
# unit service dispatch
# 0 A energy 20.0
# 1 B energy 80.0
# Return the price of energy in each region.
print(market.get_energy_prices())
# region price
# 0 NSW 57.89
Interconnector with losses¶
import pandas as pd
from nempy import markets
# The only generator is located in NSW.
unit_info = pd.DataFrame({
'unit': ['A'],
'region': ['NSW']
})
# Create a market instance.
market = markets.SpotMarket(unit_info=unit_info, market_regions=['NSW', 'VIC'])
# Volume of each bid.
volume_bids = pd.DataFrame({
'unit': ['A'],
'1': [100.0] # MW
})
market.set_unit_volume_bids(volume_bids)
# Price of each bid.
price_bids = pd.DataFrame({
'unit': ['A'],
'1': [50.0] # $/MW
})
market.set_unit_price_bids(price_bids)
# NSW has no demand but VIC has 90 MW.
demand = pd.DataFrame({
'region': ['NSW', 'VIC'],
'demand': [0.0, 90.0] # MW
})
market.set_demand_constraints(demand)
# There is one interconnector between NSW and VIC. Its nominal direction is towards VIC.
interconnectors = pd.DataFrame({
'interconnector': ['little_link'],
'to_region': ['VIC'],
'from_region': ['NSW'],
'max': [100.0],
'min': [-120.0]
})
market.set_interconnectors(interconnectors)
# The interconnector loss function. In this case losses are always 5 % of line flow.
def constant_losses(flow):
return abs(flow) * 0.05
# The loss function on a per interconnector basis. Also details how the losses should be proportioned to the
# connected regions.
loss_functions = pd.DataFrame({
'interconnector': ['little_link'],
'from_region_loss_share': [0.5], # losses are shared equally.
'loss_function': [constant_losses]
})
# The points to linearly interpolate the loss function between. In this example the loss function is linear so only
# three points are needed, but if a non linear loss function was used then more points would be better.
interpolation_break_points = pd.DataFrame({
'interconnector': ['little_link', 'little_link', 'little_link'],
'loss_segment': [1, 2, 3],
'break_point': [-120.0, 0.0, 100]
})
market.set_interconnector_losses(loss_functions, interpolation_break_points)
# Calculate dispatch.
market.dispatch()
# Return the total dispatch of each unit in MW.
print(market.get_unit_dispatch())
# unit service dispatch
# 0 A energy 94.615385
# Return interconnector flow and losses.
print(market.get_interconnector_flows())
# interconnector flow losses
# 0 little_link 92.307692 4.615385
# Return the price of energy in each region.
print(market.get_energy_prices())
# region price
# 0 NSW 50.000000
# 1 VIC 52.564103
Dynamic non-linear interconnector losses¶
Implements creating loss functions as described in
Marginal Loss Factors documentation section 3 to 5
.
import pandas as pd
from nempy import markets
from nempy.historical_inputs import interconnectors as interconnector_inputs
# The only generator is located in NSW.
unit_info = pd.DataFrame({
'unit': ['A'],
'region': ['NSW']
})
# Create a market instance.
market = markets.SpotMarket(unit_info=unit_info,
market_regions=['NSW', 'VIC'])
# Volume of each bid.
volume_bids = pd.DataFrame({
'unit': ['A'],
'1': [1000.0] # MW
})
market.set_unit_volume_bids(volume_bids)
# Price of each bid.
price_bids = pd.DataFrame({
'unit': ['A'],
'1': [50.0] # $/MW
})
market.set_unit_price_bids(price_bids)
# NSW has no demand but VIC has 800 MW.
demand = pd.DataFrame({
'region': ['NSW', 'VIC'],
'demand': [0.0, 800.0], # MW
'loss_function_demand': [0.0, 800.0] # MW
})
market.set_demand_constraints(demand.loc[:, ['region', 'demand']])
# There is one interconnector between NSW and VIC.
# Its nominal direction is towards VIC.
interconnectors = pd.DataFrame({
'interconnector': ['VIC1-NSW1'],
'to_region': ['VIC'],
'from_region': ['NSW'],
'max': [1000.0],
'min': [-1200.0]
})
market.set_interconnectors(interconnectors)
# Create a demand dependent loss function.
# Specify the demand dependency
demand_coefficients = pd.DataFrame({
'interconnector': ['VIC1-NSW1', 'VIC1-NSW1'],
'region': ['NSW1', 'VIC1'],
'demand_coefficient': [0.000021734, -0.000031523]})
# Specify the loss function constant and flow coefficient.
interconnector_coefficients = pd.DataFrame({
'interconnector': ['VIC1-NSW1'],
'loss_constant': [1.0657],
'flow_coefficient': [0.00017027],
'from_region_loss_share': [0.5]})
# Create loss functions on per interconnector basis.
loss_functions = interconnector_inputs._create_loss_functions(
interconnector_coefficients, demand_coefficients,
demand.loc[:, ['region', 'loss_function_demand']])
# The points to linearly interpolate the loss function between.
interpolation_break_points = pd.DataFrame({
'interconnector': 'VIC1-NSW1',
'loss_segment': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
'break_point': [-1200.0, -1000.0, -800.0, -600.0, -400.0, -200.0,
0.0, 200.0, 400.0, 600.0, 800.0, 1000]
})
market.set_interconnector_losses(loss_functions,
interpolation_break_points)
# Calculate dispatch.
market.dispatch()
# Return the total dispatch of each unit in MW.
print(market.get_unit_dispatch())
# unit service dispatch
# 0 A energy 920.205473
# Return interconnector flow and losses.
print(market.get_interconnector_flows())
# interconnector flow losses
# 0 VIC1-NSW1 860.102737 120.205473
# Return the price of energy in each region.
print(market.get_energy_prices())
# region price
# 0 NSW 50.000000
# 1 VIC 62.292869
Simple FCAS markets¶
Implements a market for energy, regulation raise and contingency 6 sec raise, with
co-optimisation constraints as described in section 6.2 and 6.3 of
FCAS Model in NEMDE
.
import pandas as pd
from nempy import markets
# Set options so you see all DataFrame columns in print outs.
pd.options.display.width = 0
# Volume of each bid.
volume_bids = pd.DataFrame({
'unit': ['A', 'A', 'B', 'B', 'B'],
'service': ['energy', 'raise_6s', 'energy',
'raise_6s', 'raise_reg'],
'1': [100.0, 10.0, 110.0, 15.0, 15.0], # MW
})
print(volume_bids)
# unit service 1
# 0 A energy 100.0
# 1 A raise_6s 10.0
# 2 B energy 110.0
# 3 B raise_6s 15.0
# 4 B raise_reg 15.0
# Price of each bid.
price_bids = pd.DataFrame({
'unit': ['A', 'A', 'B', 'B', 'B'],
'service': ['energy', 'raise_6s', 'energy',
'raise_6s', 'raise_reg'],
'1': [50.0, 35.0, 60.0, 20.0, 30.0], # $/MW
})
print(price_bids)
# unit service 1
# 0 A energy 50.0
# 1 A raise_6s 35.0
# 2 B energy 60.0
# 3 B raise_6s 20.0
# 4 B raise_reg 30.0
# Participant defined operational constraints on FCAS enablement.
fcas_trapeziums = pd.DataFrame({
'unit': ['B', 'B', 'A'],
'service': ['raise_reg', 'raise_6s', 'raise_6s'],
'max_availability': [15.0, 15.0, 10.0],
'enablement_min': [50.0, 50.0, 70.0],
'low_break_point': [65.0, 65.0, 80.0],
'high_break_point': [95.0, 95.0, 100.0],
'enablement_max': [110.0, 110.0, 110.0]
})
print(fcas_trapeziums)
# unit service max_availability enablement_min low_break_point high_break_point enablement_max
# 0 B raise_reg 15.0 50.0 65.0 95.0 110.0
# 1 B raise_6s 15.0 50.0 65.0 95.0 110.0
# 2 A raise_6s 10.0 70.0 80.0 100.0 110.0
# Unit locations.
unit_info = pd.DataFrame({
'unit': ['A', 'B'],
'region': ['NSW', 'NSW']
})
print(unit_info)
# unit region
# 0 A NSW
# 1 B NSW
# The demand in the region(s) being dispatched.
demand = pd.DataFrame({
'region': ['NSW'],
'demand': [195.0] # MW
})
print(demand)
# region demand
# 0 NSW 195.0
# FCAS requirement in the region(s) being dispatched.
fcas_requirements = pd.DataFrame({
'set': ['nsw_regulation_requirement', 'nsw_raise_6s_requirement'],
'region': ['NSW', 'NSW'],
'service': ['raise_reg', 'raise_6s'],
'volume': [10.0, 10.0] # MW
})
print(fcas_requirements)
# set region service volume
# 0 nsw_regulation_requirement NSW raise_reg 10.0
# 1 nsw_raise_6s_requirement NSW raise_6s 10.0
# Create the market model with unit service bids.
market = markets.SpotMarket(unit_info=unit_info,
market_regions=['NSW'])
market.set_unit_volume_bids(volume_bids)
market.set_unit_price_bids(price_bids)
# Create constraints that enforce the top of the FCAS trapezium.
fcas_availability = fcas_trapeziums.loc[:, ['unit', 'service', 'max_availability']]
market.set_fcas_max_availability(fcas_availability)
# Create constraints that enforce the lower and upper slope of the FCAS regulation
# service trapeziums.
regulation_trapeziums = fcas_trapeziums[fcas_trapeziums['service'] == 'raise_reg']
market.set_energy_and_regulation_capacity_constraints(regulation_trapeziums)
# Create constraints that enforce the lower and upper slope of the FCAS contingency
# trapezium. These constraints also scale slopes of the trapezium to ensure the
# co-dispatch of contingency and regulation services is technically feasible.
contingency_trapeziums = fcas_trapeziums[fcas_trapeziums['service'] == 'raise_6s']
market.set_joint_capacity_constraints(contingency_trapeziums)
# Set the demand for energy.
market.set_demand_constraints(demand)
# Set the required volume of FCAS services.
market.set_fcas_requirements_constraints(fcas_requirements)
# Calculate dispatch and pricing
market.dispatch()
# Return the total dispatch of each unit in MW.
print(market.get_unit_dispatch())
# unit service dispatch
# 0 A energy 100.0
# 1 A raise_6s 5.0
# 2 B energy 95.0
# 3 B raise_6s 5.0
# 4 B raise_reg 10.0
# Return the price of energy.
print(market.get_energy_prices())
# region price
# 0 NSW 75.0
# Note:
# A marginal unit of energy would have to come from unit B, as unit A is fully
# dispatched, this would cost 60 $/MW/h. However, to turn unit B up, you would
# need it to dispatch less raise_6s, this would cost - 20 $/MW/h, and the
# extra FCAS would have to come from unit A, this would cost 35 $/MW/h.
# Therefore the marginal cost of energy is 60 - 20 + 35 = 75 $/MW/h
# Return the price of regulation FCAS.
print(market.get_fcas_prices())
# region service price
# 0 NSW raise_6s 35.0
# 1 NSW raise_reg 45.0
# Note:
# A marginal unit of raise_reg would have to come from unit B as it is the only
# provider, this would cost 30 $/MW/h. It would also require unit B to provide
# less raise_6s, this would cost -20 $/MW/h, extra raise_6s would then be
# required from unit A costing 35 $/MW/h. This gives a total marginal cost of
# 30 - 20 + 35 = 45 $/MW/h.
#
# A marginal unit of raise_6s would be provided by unit A at a cost of 35 $/MW/h.
Simple recreation of historical dispatch¶
Demonstrates using nempy to recreate historical dispatch intervals by implementing a simple energy market with unit bids, unit maximum capacity constraints and interconnector models, all sourced from historical data published by AEMO.
# Notice: this script downloads large volumes of historical market data from AEMO's nemweb portal.
import sqlite3
import pandas as pd
from nempy import markets
from nempy.historical_inputs import loaders, mms_db, \
xml_cache, units, demand, interconnectors
con = sqlite3.connect('market_management_system.db')
mms_db_manager = mms_db.DBManager(connection=con)
xml_cache_manager = xml_cache.XMLCacheManager('cache_directory')
# The second time this example is run on a machine this flag can
# be set to false to save downloading the data again.
down_load_inputs = True
if down_load_inputs:
# This requires approximately 5 GB of storage.
mms_db_manager.populate(start_year=2019, start_month=1,
end_year=2019, end_month=1)
# This requires approximately 60 GB of storage.
xml_cache_manager.populate(start_year=2019, start_month=1,
end_year=2019, end_month=1)
raw_inputs_loader = loaders.RawInputsLoader(
nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_db_manager)
# A list of intervals we want to recreate historical dispatch for.
dispatch_intervals = ['2019/01/01 12:00:00',
'2019/01/01 12:05:00',
'2019/01/01 12:10:00',
'2019/01/01 12:15:00',
'2019/01/01 12:20:00',
'2019/01/01 12:25:00',
'2019/01/01 12:30:00']
# List for saving outputs to.
outputs = []
# Create and dispatch the spot market for each dispatch interval.
for interval in dispatch_intervals:
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
interconnector_inputs = \
interconnectors.InterconnectorData(raw_inputs_loader)
unit_info = unit_inputs.get_unit_info()
market = markets.SpotMarket(market_regions=['QLD1', 'NSW1', 'VIC1',
'SA1', 'TAS1'],
unit_info=unit_info)
volume_bids, price_bids = unit_inputs.get_processed_bids()
market.set_unit_volume_bids(volume_bids)
market.set_unit_price_bids(price_bids)
unit_bid_limit = unit_inputs.get_unit_bid_availability()
market.set_unit_bid_capacity_constraints(unit_bid_limit)
unit_uigf_limit = unit_inputs.get_unit_uigf_limits()
market.set_unconstrained_intermitent_generation_forecast_constraint(
unit_uigf_limit)
regional_demand = demand_inputs.get_operational_demand()
market.set_demand_constraints(regional_demand)
interconnectors_definitions = \
interconnector_inputs.get_interconnector_definitions()
loss_functions, interpolation_break_points = \
interconnector_inputs.get_interconnector_loss_model()
market.set_interconnectors(interconnectors_definitions)
market.set_interconnector_losses(loss_functions,
interpolation_break_points)
market.dispatch()
# Save prices from this interval
prices = market.get_energy_prices()
prices['time'] = interval
outputs.append(prices.loc[:, ['time', 'region', 'price']])
con.close()
print(pd.concat(outputs))
# time region price
# 0 2019/01/01 12:00:00 NSW1 91.857666
# 1 2019/01/01 12:00:00 QLD1 76.180429
# 2 2019/01/01 12:00:00 SA1 85.126914
# 3 2019/01/01 12:00:00 TAS1 85.948523
# 4 2019/01/01 12:00:00 VIC1 83.250703
# 0 2019/01/01 12:05:00 NSW1 88.357224
# 1 2019/01/01 12:05:00 QLD1 72.255334
# 2 2019/01/01 12:05:00 SA1 82.417720
# 3 2019/01/01 12:05:00 TAS1 83.451561
# 4 2019/01/01 12:05:00 VIC1 80.621103
# 0 2019/01/01 12:10:00 NSW1 91.857666
# 1 2019/01/01 12:10:00 QLD1 75.665675
# 2 2019/01/01 12:10:00 SA1 85.680310
# 3 2019/01/01 12:10:00 TAS1 86.715499
# 4 2019/01/01 12:10:00 VIC1 83.774337
# 0 2019/01/01 12:15:00 NSW1 88.343034
# 1 2019/01/01 12:15:00 QLD1 71.746786
# 2 2019/01/01 12:15:00 SA1 82.379539
# 3 2019/01/01 12:15:00 TAS1 83.451561
# 4 2019/01/01 12:15:00 VIC1 80.621103
# 0 2019/01/01 12:20:00 NSW1 91.864122
# 1 2019/01/01 12:20:00 QLD1 75.052319
# 2 2019/01/01 12:20:00 SA1 85.722028
# 3 2019/01/01 12:20:00 TAS1 86.576848
# 4 2019/01/01 12:20:00 VIC1 83.859306
# 0 2019/01/01 12:25:00 NSW1 91.864122
# 1 2019/01/01 12:25:00 QLD1 75.696247
# 2 2019/01/01 12:25:00 SA1 85.746024
# 3 2019/01/01 12:25:00 TAS1 86.613642
# 4 2019/01/01 12:25:00 VIC1 83.894945
# 0 2019/01/01 12:30:00 NSW1 91.870167
# 1 2019/01/01 12:30:00 QLD1 75.188735
# 2 2019/01/01 12:30:00 SA1 85.694071
# 3 2019/01/01 12:30:00 TAS1 86.560602
# 4 2019/01/01 12:30:00 VIC1 83.843570
Detailed recreation of historical dispatch¶
Demonstrates using nempy to recreate historical dispatch intervals by implementing a simple energy market using all the features of the nempy market model, all inputs sourced from historical data published by AEMO. Note each interval is dispatched as a standalone simulation and the results from one dispatch interval are not carried over to be the initial conditions of the next interval, rather the historical initial conditions are always used.
# Notice: this script downloads large volumes of historical market data from AEMO's nemweb portal.
import sqlite3
import pandas as pd
from nempy import markets
from nempy.historical_inputs import loaders, mms_db, \
xml_cache, units, demand, interconnectors, \
constraints
con = sqlite3.connect('market_management_system.db')
mms_db_manager = mms_db.DBManager(connection=con)
xml_cache_manager = xml_cache.XMLCacheManager('cache_directory')
# The second time this example is run on a machine this flag can
# be set to false to save downloading the data again.
down_load_inputs = True
if down_load_inputs:
# This requires approximately 5 GB of storage.
mms_db_manager.populate(start_year=2019, start_month=1,
end_year=2019, end_month=1)
# This requires approximately 60 GB of storage.
xml_cache_manager.populate(start_year=2019, start_month=1,
end_year=2019, end_month=1)
raw_inputs_loader = loaders.RawInputsLoader(
nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_db_manager)
# A list of intervals we want to recreate historical dispatch for.
dispatch_intervals = ['2019/01/01 12:00:00',
'2019/01/01 12:05:00',
'2019/01/01 12:10:00',
'2019/01/01 12:15:00',
'2019/01/01 12:20:00',
'2019/01/01 12:25:00',
'2019/01/01 12:30:00']
# List for saving outputs to.
outputs = []
# Create and dispatch the spot market for each dispatch interval.
for interval in dispatch_intervals:
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
unit_info = unit_inputs.get_unit_info()
market = markets.SpotMarket(market_regions=['QLD1', 'NSW1', 'VIC1',
'SA1', 'TAS1'],
unit_info=unit_info)
# Set bids
volume_bids, price_bids = unit_inputs.get_processed_bids()
market.set_unit_volume_bids(volume_bids)
market.set_unit_price_bids(price_bids)
# Set bid in capacity limits
unit_bid_limit = unit_inputs.get_unit_bid_availability()
market.set_unit_bid_capacity_constraints(unit_bid_limit)
cost = constraint_inputs.get_constraint_violation_prices()['unit_capacity']
market.make_constraints_elastic('unit_bid_capacity', violation_cost=cost)
# Set limits provided by the unconstrained intermittent generation
# forecasts. Primarily for wind and solar.
unit_uigf_limit = unit_inputs.get_unit_uigf_limits()
market.set_unconstrained_intermitent_generation_forecast_constraint(
unit_uigf_limit)
cost = constraint_inputs.get_constraint_violation_prices()['uigf']
market.make_constraints_elastic('uigf_capacity', violation_cost=cost)
# Set unit ramp rates.
ramp_rates = unit_inputs.get_ramp_rates_used_for_energy_dispatch()
market.set_unit_ramp_up_constraints(
ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_up_rate']])
market.set_unit_ramp_down_constraints(
ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_down_rate']])
cost = constraint_inputs.get_constraint_violation_prices()['ramp_rate']
market.make_constraints_elastic('ramp_up', violation_cost=cost)
market.make_constraints_elastic('ramp_down', violation_cost=cost)
# Set unit FCAS trapezium constraints.
unit_inputs.add_fcas_trapezium_constraints()
cost = constraint_inputs.get_constraint_violation_prices()['fcas_max_avail']
fcas_availability = unit_inputs.get_fcas_max_availability()
market.set_fcas_max_availability(fcas_availability)
market.make_constraints_elastic('fcas_max_availability', cost)
cost = constraint_inputs.get_constraint_violation_prices()['fcas_profile']
regulation_trapeziums = unit_inputs.get_fcas_regulation_trapeziums()
market.set_energy_and_regulation_capacity_constraints(regulation_trapeziums)
market.make_constraints_elastic('energy_and_regulation_capacity', cost)
scada_ramp_down_rates = unit_inputs.get_scada_ramp_down_rates_of_lower_reg_units()
market.set_joint_ramping_constraints_lower_reg(scada_ramp_down_rates)
market.make_constraints_elastic('joint_ramping_lower_reg', cost)
scada_ramp_up_rates = unit_inputs.get_scada_ramp_up_rates_of_raise_reg_units()
market.set_joint_ramping_constraints_raise_reg(scada_ramp_up_rates)
market.make_constraints_elastic('joint_ramping_raise_reg', cost)
contingency_trapeziums = unit_inputs.get_contingency_services()
market.set_joint_capacity_constraints(contingency_trapeziums)
market.make_constraints_elastic('joint_capacity', cost)
# Set interconnector definitions, limits and loss models.
interconnectors_definitions = \
interconnector_inputs.get_interconnector_definitions()
loss_functions, interpolation_break_points = \
interconnector_inputs.get_interconnector_loss_model()
market.set_interconnectors(interconnectors_definitions)
market.set_interconnector_losses(loss_functions,
interpolation_break_points)
# Add generic constraints and FCAS market constraints.
fcas_requirements = constraint_inputs.get_fcas_requirements()
market.set_fcas_requirements_constraints(fcas_requirements)
violation_costs = constraint_inputs.get_violation_costs()
market.make_constraints_elastic('fcas', violation_cost=violation_costs)
generic_rhs = constraint_inputs.get_rhs_and_type_excluding_regional_fcas_constraints()
market.set_generic_constraints(generic_rhs)
market.make_constraints_elastic('generic', violation_cost=violation_costs)
unit_generic_lhs = constraint_inputs.get_unit_lhs()
market.link_units_to_generic_constraints(unit_generic_lhs)
interconnector_generic_lhs = constraint_inputs.get_interconnector_lhs()
market.link_interconnectors_to_generic_constraints(
interconnector_generic_lhs)
# Set the operational demand to be met by dispatch.
regional_demand = demand_inputs.get_operational_demand()
market.set_demand_constraints(regional_demand)
# Get unit dispatch without fast start constraints and use it to
# make fast start unit commitment decisions.
market.dispatch()
dispatch = market.get_unit_dispatch()
fast_start_profiles = unit_inputs.get_fast_start_profiles_for_dispatch(dispatch)
market.set_fast_start_constraints(fast_start_profiles)
if 'fast_start' in market.get_constraint_set_names():
cost = constraint_inputs.get_constraint_violation_prices()['fast_start']
market.make_constraints_elastic('fast_start', violation_cost=cost)
# If AEMO historical used the over constrained dispatch rerun
# process then allow it to be used in dispatch. This is needed
# because sometimes the conditions for over constrained dispatch
# are present but the rerun process isn't used.
if constraint_inputs.is_over_constrained_dispatch_rerun():
market.dispatch(allow_over_constrained_dispatch_re_run=True,
energy_market_floor_price=-1000.0,
energy_market_ceiling_price=14500.0,
fcas_market_ceiling_price=1000.0)
else:
# The market price ceiling and floor are not needed here
# because they are only used for the over constrained
# dispatch rerun process.
market.dispatch(allow_over_constrained_dispatch_re_run=False)
# Save prices from this interval
prices = market.get_energy_prices()
prices['time'] = interval
outputs.append(prices.loc[:, ['time', 'region', 'price']])
con.close()
print(pd.concat(outputs))
# time region price
# 0 2019/01/01 12:00:00 NSW1 91.870167
# 1 2019/01/01 12:00:00 QLD1 76.190796
# 2 2019/01/01 12:00:00 SA1 86.899534
# 3 2019/01/01 12:00:00 TAS1 89.805037
# 4 2019/01/01 12:00:00 VIC1 84.984255
# 0 2019/01/01 12:05:00 NSW1 91.870496
# 1 2019/01/01 12:05:00 QLD1 64.991736
# 2 2019/01/01 12:05:00 SA1 87.462599
# 3 2019/01/01 12:05:00 TAS1 90.178036
# 4 2019/01/01 12:05:00 VIC1 85.556009
# 0 2019/01/01 12:10:00 NSW1 91.870496
# 1 2019/01/01 12:10:00 QLD1 64.991736
# 2 2019/01/01 12:10:00 SA1 86.868556
# 3 2019/01/01 12:10:00 TAS1 89.983716
# 4 2019/01/01 12:10:00 VIC1 84.936150
# 0 2019/01/01 12:15:00 NSW1 91.870496
# 1 2019/01/01 12:15:00 QLD1 64.776456
# 2 2019/01/01 12:15:00 SA1 86.844540
# 3 2019/01/01 12:15:00 TAS1 89.582288
# 4 2019/01/01 12:15:00 VIC1 84.990796
# 0 2019/01/01 12:20:00 NSW1 91.870496
# 1 2019/01/01 12:20:00 QLD1 64.776456
# 2 2019/01/01 12:20:00 SA1 87.496112
# 3 2019/01/01 12:20:00 TAS1 90.291144
# 4 2019/01/01 12:20:00 VIC1 85.594840
# 0 2019/01/01 12:25:00 NSW1 91.870167
# 1 2019/01/01 12:25:00 QLD1 64.991736
# 2 2019/01/01 12:25:00 SA1 87.519993
# 3 2019/01/01 12:25:00 TAS1 90.488064
# 4 2019/01/01 12:25:00 VIC1 85.630617
# 0 2019/01/01 12:30:00 NSW1 91.870496
# 1 2019/01/01 12:30:00 QLD1 64.991736
# 2 2019/01/01 12:30:00 SA1 87.462000
# 3 2019/01/01 12:30:00 TAS1 90.196284
# 4 2019/01/01 12:30:00 VIC1 85.573321
Time sequential recreation of historical dispatch¶
Demonstrates using nempy to recreate historical dispatch in a dynamic or time sequential manner, this means the outputs of one interval become the initial conditions for the next dispatch interval. Note, currently there is not the infrastructure in place to include features such as generic constraints in the time sequential model as the rhs values of many constraints would need to be re-calculated based on the dynamic system state. Similarly, using historical bids in this example is some what problematic as participants also dynamically change their bids based on market conditions. However, for sake of demonstrating how nempy can be used to create time sequential models, historical bids are used in this example.
# Notice: this script downloads large volumes of historical market data from AEMO's nemweb portal.
import sqlite3
import pandas as pd
from nempy import markets, time_sequential
from nempy.historical_inputs import loaders, mms_db, \
xml_cache, units, demand, interconnectors, constraints
con = sqlite3.connect('market_management_system.db')
mms_db_manager = mms_db.DBManager(connection=con)
xml_cache_manager = xml_cache.XMLCacheManager('cache_directory')
# The second time this example is run on a machine this flag can
# be set to false to save downloading the data again.
down_load_inputs = False
if down_load_inputs:
# This requires approximately 5 GB of storage.
mms_db_manager.populate(start_year=2019, start_month=1,
end_year=2019, end_month=1)
# This requires approximately 60 GB of storage.
xml_cache_manager.populate(start_year=2019, start_month=1,
end_year=2019, end_month=1)
raw_inputs_loader = loaders.RawInputsLoader(
nemde_xml_cache_manager=xml_cache_manager,
market_management_system_database=mms_db_manager)
# A list of intervals we want to recreate historical dispatch for.
dispatch_intervals = ['2019/01/01 12:00:00',
'2019/01/01 12:05:00',
'2019/01/01 12:10:00',
'2019/01/01 12:15:00',
'2019/01/01 12:20:00',
'2019/01/01 12:25:00',
'2019/01/01 12:30:00']
# List for saving outputs to.
outputs = []
unit_dispatch = None
# Create and dispatch the spot market for each dispatch interval.
for interval in dispatch_intervals:
print(interval)
raw_inputs_loader.set_interval(interval)
unit_inputs = units.UnitData(raw_inputs_loader)
demand_inputs = demand.DemandData(raw_inputs_loader)
interconnector_inputs = \
interconnectors.InterconnectorData(raw_inputs_loader)
constraint_inputs = \
constraints.ConstraintData(raw_inputs_loader)
unit_info = unit_inputs.get_unit_info()
market = markets.SpotMarket(market_regions=['QLD1', 'NSW1', 'VIC1',
'SA1', 'TAS1'],
unit_info=unit_info)
volume_bids, price_bids = unit_inputs.get_processed_bids()
market.set_unit_volume_bids(volume_bids)
market.set_unit_price_bids(price_bids)
violation_cost = \
constraint_inputs.get_constraint_violation_prices()['unit_capacity']
unit_bid_limit = unit_inputs.get_unit_bid_availability()
market.set_unit_bid_capacity_constraints(unit_bid_limit)
market.make_constraints_elastic('unit_bid_capacity', violation_cost)
unit_uigf_limit = unit_inputs.get_unit_uigf_limits()
market.set_unconstrained_intermitent_generation_forecast_constraint(
unit_uigf_limit)
ramp_rates = unit_inputs.get_as_bid_ramp_rates()
# This is the part that makes it time sequential.
if unit_dispatch is None:
# For the first dispatch interval we use historical values
# as initial conditions.
historical_dispatch = unit_inputs.get_initial_unit_output()
ramp_rates = time_sequential.create_seed_ramp_rate_parameters(
historical_dispatch, ramp_rates)
else:
# For subsequent dispatch intervals we use the output levels
# determined by the last dispatch as the new initial conditions
ramp_rates = time_sequential.construct_ramp_rate_parameters(
unit_dispatch, ramp_rates)
violation_cost = \
constraint_inputs.get_constraint_violation_prices()['ramp_rate']
market.set_unit_ramp_up_constraints(
ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_up_rate']])
market.make_constraints_elastic('ramp_up', violation_cost)
market.set_unit_ramp_down_constraints(
ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_down_rate']])
market.make_constraints_elastic('ramp_down', violation_cost)
regional_demand = demand_inputs.get_operational_demand()
market.set_demand_constraints(regional_demand)
interconnectors_definitions = \
interconnector_inputs.get_interconnector_definitions()
loss_functions, interpolation_break_points = \
interconnector_inputs.get_interconnector_loss_model()
market.set_interconnectors(interconnectors_definitions)
market.set_interconnector_losses(loss_functions,
interpolation_break_points)
market.dispatch()
# Save prices from this interval
prices = market.get_energy_prices()
prices['time'] = interval
outputs.append(prices.loc[:, ['time', 'region', 'price']])
unit_dispatch = market.get_unit_dispatch()
con.close()
print(pd.concat(outputs))
# time region price
# 0 2019/01/01 12:00:00 NSW1 91.857666
# 1 2019/01/01 12:00:00 QLD1 76.180429
# 2 2019/01/01 12:00:00 SA1 85.126914
# 3 2019/01/01 12:00:00 TAS1 85.948523
# 4 2019/01/01 12:00:00 VIC1 83.250703
# 0 2019/01/01 12:05:00 NSW1 88.357224
# 1 2019/01/01 12:05:00 QLD1 72.255334
# 2 2019/01/01 12:05:00 SA1 82.417720
# 3 2019/01/01 12:05:00 TAS1 83.451561
# 4 2019/01/01 12:05:00 VIC1 80.621103
# 0 2019/01/01 12:10:00 NSW1 91.857666
# 1 2019/01/01 12:10:00 QLD1 75.665675
# 2 2019/01/01 12:10:00 SA1 85.680310
# 3 2019/01/01 12:10:00 TAS1 86.715499
# 4 2019/01/01 12:10:00 VIC1 83.774337
# 0 2019/01/01 12:15:00 NSW1 88.343034
# 1 2019/01/01 12:15:00 QLD1 71.746786
# 2 2019/01/01 12:15:00 SA1 82.379539
# 3 2019/01/01 12:15:00 TAS1 83.451561
# 4 2019/01/01 12:15:00 VIC1 80.621103
# 0 2019/01/01 12:20:00 NSW1 91.864122
# 1 2019/01/01 12:20:00 QLD1 75.052319
# 2 2019/01/01 12:20:00 SA1 85.722028
# 3 2019/01/01 12:20:00 TAS1 86.576848
# 4 2019/01/01 12:20:00 VIC1 83.859306
# 0 2019/01/01 12:25:00 NSW1 91.864122
# 1 2019/01/01 12:25:00 QLD1 75.696247
# 2 2019/01/01 12:25:00 SA1 85.746024
# 3 2019/01/01 12:25:00 TAS1 86.613642
# 4 2019/01/01 12:25:00 VIC1 83.894945
# 0 2019/01/01 12:30:00 NSW1 91.870167
# 1 2019/01/01 12:30:00 QLD1 75.188735
# 2 2019/01/01 12:30:00 SA1 85.694071
# 3 2019/01/01 12:30:00 TAS1 86.560602
# 4 2019/01/01 12:30:00 VIC1 83.843570
|