Publications
Links to publications and associated source code.
Nempy Technical Brief
The nempy technical brief is available here as a pdf, and is also used as the introduction for the readthedocs page. The code below uses Nempy v1.1.0, which is
now superseded by v2.0.0.
Source code for Figure 1
# Notice:
# - This script downloads large volumes of historical market data from
#   AEMO's nemweb portal. Set the download_inputs flag below to False once
#   the data has been downloaded, to avoid repeating the download.

import sqlite3
import random
from datetime import datetime, timedelta

import pandas as pd

from nempy import markets
from nempy.historical_inputs import (
    loaders, mms_db, xml_cache, units, demand, interconnectors, constraints)

# A full year of 5 min dispatch input data is very large, approximately
# 800 GB, so it is stored on an external SSD.
con = sqlite3.connect('F:/nempy_test_files/historical_mms.db')
mms_db_manager = mms_db.DBManager(connection=con)
xml_cache_manager = xml_cache.XMLCacheManager('F:/nempy_test_files/nemde_cache')

# After the first run on a machine this flag can stay False so the data
# is not downloaded again.
download_inputs = False

if download_inputs:
    mms_db_manager.populate(start_year=2019, start_month=1,
                            end_year=2019, end_month=12)
    xml_cache_manager.populate(start_year=2019, start_month=1,
                               end_year=2020, end_month=1)

raw_inputs_loader = loaders.RawInputsLoader(
    nemde_xml_cache_manager=xml_cache_manager,
    market_management_system_database=mms_db_manager)
36
37
# Helper producing a list of randomly selected dispatch intervals.
def get_test_intervals(number):
    """Return `number` randomly chosen 2019 dispatch interval times.

    Times are strings formatted 'YYYY/MM/DD HH:MM:SS'. The random seed is
    fixed so repeated calls select the same intervals.
    """
    first = datetime(year=2019, month=1, day=1, hour=0, minute=0)
    last = datetime(year=2019, month=12, day=31, hour=0, minute=0)
    total_5_min_intervals = (last - first).days * 12 * 24
    random.seed(1)
    chosen = random.sample(range(1, total_5_min_intervals), number)
    stamps = [first + timedelta(minutes=5 * i) for i in chosen]
    return [s.isoformat().replace('T', ' ').replace('-', '/') for s in stamps]
50
51
# Accumulates one prices DataFrame per dispatch interval.
outputs = []

# Build and dispatch the spot market for each sampled dispatch interval.
for interval_time in get_test_intervals(number=1000):
    raw_inputs_loader.set_interval(interval_time)
    unit_inputs = units.UnitData(raw_inputs_loader)
    interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
    constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
    demand_inputs = demand.DemandData(raw_inputs_loader)

    market = markets.SpotMarket(
        market_regions=['QLD1', 'NSW1', 'VIC1', 'SA1', 'TAS1'],
        unit_info=unit_inputs.get_unit_info())

    # CBC (open source) is the default solver; GUROBI is also supported.
    market.solver_name = 'CBC'

    # Bids.
    volume_bids, price_bids = unit_inputs.get_processed_bids()
    market.set_unit_volume_bids(volume_bids)
    market.set_unit_price_bids(price_bids)

    # Bid in capacity limits.
    bid_in_capacity = unit_inputs.get_unit_bid_availability()
    market.set_unit_bid_capacity_constraints(bid_in_capacity)
    penalty = constraint_inputs.get_constraint_violation_prices()['unit_capacity']
    market.make_constraints_elastic('unit_bid_capacity', violation_cost=penalty)

    # Limits from the unconstrained intermittent generation forecasts,
    # primarily for wind and solar units.
    uigf_limits = unit_inputs.get_unit_uigf_limits()
    market.set_unconstrained_intermitent_generation_forecast_constraint(
        uigf_limits)
    penalty = constraint_inputs.get_constraint_violation_prices()['uigf']
    market.make_constraints_elastic('uigf_capacity', violation_cost=penalty)

    # Unit ramp rates.
    ramp_rates = unit_inputs.get_ramp_rates_used_for_energy_dispatch()
    market.set_unit_ramp_up_constraints(
        ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_up_rate']])
    market.set_unit_ramp_down_constraints(
        ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_down_rate']])
    penalty = constraint_inputs.get_constraint_violation_prices()['ramp_rate']
    market.make_constraints_elastic('ramp_up', violation_cost=penalty)
    market.make_constraints_elastic('ramp_down', violation_cost=penalty)

    # Unit FCAS trapezium constraints.
    unit_inputs.add_fcas_trapezium_constraints()
    penalty = constraint_inputs.get_constraint_violation_prices()['fcas_max_avail']
    fcas_availability = unit_inputs.get_fcas_max_availability()
    market.set_fcas_max_availability(fcas_availability)
    market.make_constraints_elastic('fcas_max_availability', violation_cost=penalty)
    penalty = constraint_inputs.get_constraint_violation_prices()['fcas_profile']
    regulation_trapeziums = unit_inputs.get_fcas_regulation_trapeziums()
    market.set_energy_and_regulation_capacity_constraints(regulation_trapeziums)
    market.make_constraints_elastic('energy_and_regulation_capacity',
                                    violation_cost=penalty)
    lower_reg_scada_ramps = unit_inputs.get_scada_ramp_down_rates_of_lower_reg_units()
    market.set_joint_ramping_constraints_lower_reg(lower_reg_scada_ramps)
    market.make_constraints_elastic('joint_ramping_lower_reg',
                                    violation_cost=penalty)
    raise_reg_scada_ramps = unit_inputs.get_scada_ramp_up_rates_of_raise_reg_units()
    market.set_joint_ramping_constraints_raise_reg(raise_reg_scada_ramps)
    market.make_constraints_elastic('joint_ramping_raise_reg',
                                    violation_cost=penalty)
    contingency_trapeziums = unit_inputs.get_contingency_services()
    market.set_joint_capacity_constraints(contingency_trapeziums)
    market.make_constraints_elastic('joint_capacity', violation_cost=penalty)

    # Interconnector definitions, limits and loss models.
    interconnector_definitions = \
        interconnector_inputs.get_interconnector_definitions()
    loss_functions, interpolation_break_points = \
        interconnector_inputs.get_interconnector_loss_model()
    market.set_interconnectors(interconnector_definitions)
    market.set_interconnector_losses(loss_functions, interpolation_break_points)

    # Generic constraints and FCAS market constraints.
    fcas_requirements = constraint_inputs.get_fcas_requirements()
    market.set_fcas_requirements_constraints(fcas_requirements)
    violation_costs = constraint_inputs.get_violation_costs()
    market.make_constraints_elastic('fcas', violation_cost=violation_costs)
    generic_rhs = constraint_inputs.get_rhs_and_type_excluding_regional_fcas_constraints()
    market.set_generic_constraints(generic_rhs)
    market.make_constraints_elastic('generic', violation_cost=violation_costs)
    market.link_units_to_generic_constraints(constraint_inputs.get_unit_lhs())
    market.link_interconnectors_to_generic_constraints(
        constraint_inputs.get_interconnector_lhs())

    # Operational demand to be met by dispatch.
    market.set_demand_constraints(demand_inputs.get_operational_demand())

    # First dispatch without fast start constraints; the resulting unit
    # dispatch drives the fast start unit commitment decisions.
    market.dispatch()
    unit_dispatch = market.get_unit_dispatch()
    fast_start_profiles = unit_inputs.get_fast_start_profiles_for_dispatch(
        unit_dispatch)
    market.set_fast_start_constraints(fast_start_profiles)
    if 'fast_start' in market.get_constraint_set_names():
        penalty = constraint_inputs.get_constraint_violation_prices()['fast_start']
        market.make_constraints_elastic('fast_start', violation_cost=penalty)

    # Allow the over constrained dispatch rerun only when AEMO's
    # historical run actually used it; the conditions for it can be
    # present even when the rerun process was not applied.
    if constraint_inputs.is_over_constrained_dispatch_rerun():
        market.dispatch(allow_over_constrained_dispatch_re_run=True,
                        energy_market_floor_price=-1000.0,
                        energy_market_ceiling_price=14500.0,
                        fcas_market_ceiling_price=1000.0)
    else:
        # Floor and ceiling prices are only used by the over constrained
        # dispatch rerun process, so they are omitted here.
        market.dispatch(allow_over_constrained_dispatch_re_run=False)

    # Modelled prices for this interval.
    prices = market.get_energy_prices()
    prices['time'] = interval_time

    # Historical comparison uses ROP, the regional reference node price
    # before any price scaling is applied by AEMO.
    historical_prices = mms_db_manager.DISPATCHPRICE.get_data(interval_time)

    prices = pd.merge(prices, historical_prices,
                      left_on=['time', 'region'],
                      right_on=['SETTLEMENTDATE', 'REGIONID'])

    outputs.append(prices.loc[:, ['time', 'region', 'price',
                                  'SETTLEMENTDATE', 'REGIONID', 'ROP']])

con.close()

results = pd.concat(outputs)
results = results.sort_values('ROP').reset_index(drop=True)
results.to_csv('energy_price_results_2019_1000_intervals.csv')
194
Source code for Figure 2
# Notice:
# - This script downloads large volumes of historical market data from
#   AEMO's nemweb portal. Set the download_inputs flag below to False once
#   the data has been downloaded, to avoid repeating the download.

import sqlite3
import random
from datetime import datetime, timedelta

import pandas as pd

from nempy import markets
from nempy.historical_inputs import (
    loaders, mms_db, xml_cache, units, demand, interconnectors, constraints)

con = sqlite3.connect('historical_mms.db')
mms_db_manager = mms_db.DBManager(connection=con)
xml_cache_manager = xml_cache.XMLCacheManager('nemde_cache')

# After the first run on a machine this flag can be set to False so the
# data is not downloaded again.
download_inputs = True

if download_inputs:
    # Requires approximately 5 GB of storage.
    mms_db_manager.populate(start_year=2019, start_month=1,
                            end_year=2019, end_month=12)

    # Requires approximately 3.5 GB of storage.
    xml_cache_manager.populate(start_year=2019, start_month=1, start_day=1,
                               end_year=2020, end_month=1, end_day=1)

raw_inputs_loader = loaders.RawInputsLoader(
    nemde_xml_cache_manager=xml_cache_manager,
    market_management_system_database=mms_db_manager)
39
40
# Helper producing a list of randomly selected dispatch intervals.
def get_test_intervals(number):
    """Return `number` randomly chosen 2019 dispatch interval times.

    Times are strings formatted 'YYYY/MM/DD HH:MM:SS'. The random seed is
    fixed so repeated calls select the same intervals.
    """
    first = datetime(year=2019, month=1, day=1, hour=0, minute=0)
    last = datetime(year=2019, month=12, day=31, hour=0, minute=0)
    total_5_min_intervals = (last - first).days * 12 * 24
    random.seed(1)
    chosen = random.sample(range(1, total_5_min_intervals), number)
    stamps = [first + timedelta(minutes=5 * i) for i in chosen]
    return [s.isoformat().replace('T', ' ').replace('-', '/') for s in stamps]
53
54
# Accumulates one prices DataFrame per dispatch interval.
outputs = []

# Build and dispatch the spot market for each sampled dispatch interval.
for interval_time in get_test_intervals(number=1000):
    raw_inputs_loader.set_interval(interval_time)
    unit_inputs = units.UnitData(raw_inputs_loader)
    interconnector_inputs = interconnectors.InterconnectorData(raw_inputs_loader)
    constraint_inputs = constraints.ConstraintData(raw_inputs_loader)
    demand_inputs = demand.DemandData(raw_inputs_loader)

    market = markets.SpotMarket(
        market_regions=['QLD1', 'NSW1', 'VIC1', 'SA1', 'TAS1'],
        unit_info=unit_inputs.get_unit_info())

    # CBC (open source) is the default solver; GUROBI is also supported.
    market.solver_name = 'CBC'

    # Bids, restricted to the energy service only (this simplified model
    # excludes FCAS markets).
    volume_bids, price_bids = unit_inputs.get_processed_bids()
    volume_bids = volume_bids[volume_bids['service'] == 'energy']
    price_bids = price_bids[price_bids['service'] == 'energy']
    market.set_unit_volume_bids(volume_bids)
    market.set_unit_price_bids(price_bids)

    # Bid in capacity limits.
    bid_in_capacity = unit_inputs.get_unit_bid_availability()
    market.set_unit_bid_capacity_constraints(bid_in_capacity)
    penalty = constraint_inputs.get_constraint_violation_prices()['unit_capacity']
    market.make_constraints_elastic('unit_bid_capacity', violation_cost=penalty)

    # Limits from the unconstrained intermittent generation forecasts,
    # primarily for wind and solar units.
    uigf_limits = unit_inputs.get_unit_uigf_limits()
    market.set_unconstrained_intermitent_generation_forecast_constraint(
        uigf_limits)
    penalty = constraint_inputs.get_constraint_violation_prices()['uigf']
    market.make_constraints_elastic('uigf_capacity', violation_cost=penalty)

    # Unit ramp rates.
    ramp_rates = unit_inputs.get_ramp_rates_used_for_energy_dispatch()
    market.set_unit_ramp_up_constraints(
        ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_up_rate']])
    market.set_unit_ramp_down_constraints(
        ramp_rates.loc[:, ['unit', 'initial_output', 'ramp_down_rate']])
    penalty = constraint_inputs.get_constraint_violation_prices()['ramp_rate']
    market.make_constraints_elastic('ramp_up', violation_cost=penalty)
    market.make_constraints_elastic('ramp_down', violation_cost=penalty)

    # Interconnector definitions, limits and loss models.
    interconnector_definitions = \
        interconnector_inputs.get_interconnector_definitions()
    loss_functions, interpolation_break_points = \
        interconnector_inputs.get_interconnector_loss_model()
    market.set_interconnectors(interconnector_definitions)
    market.set_interconnector_losses(loss_functions, interpolation_break_points)

    # Operational demand to be met by dispatch.
    market.set_demand_constraints(demand_inputs.get_operational_demand())
    market.dispatch()

    # Modelled prices for this interval.
    prices = market.get_energy_prices()
    prices['time'] = interval_time

    # Historical comparison uses ROP, the regional reference node price
    # before any price scaling is applied by AEMO.
    historical_prices = mms_db_manager.DISPATCHPRICE.get_data(interval_time)

    prices = pd.merge(prices, historical_prices,
                      left_on=['time', 'region'],
                      right_on=['SETTLEMENTDATE', 'REGIONID'])

    outputs.append(prices.loc[:, ['time', 'region', 'price',
                                  'SETTLEMENTDATE', 'REGIONID', 'ROP']])

con.close()

results = pd.concat(outputs)
results = results.sort_values('ROP').reset_index(drop=True)
results.to_csv('energy_price_results_2019_1000_intervals_without_FCAS_or_generic_constraints.csv')
142