Source code for tests.test_optimisation

  1"""test ``h2ss.optimisation`` functions."""
  2
  3import geopandas as gpd
  4import numpy as np
  5import pandas as pd
  6from geopandas.testing import assert_geodataframe_equal, assert_geoseries_equal
  7from pandas.testing import assert_frame_equal
  8from scipy import integrate
  9from shapely.geometry import Point, Polygon
 10
 11from h2ss import data as rd
 12from h2ss import optimisation as opt
 13
 14
def test_ref_power_curve():
    """Test ``h2ss.optimisation.ref_power_curve``"""
    # Expected reference power [MW] for integer wind speeds 0..30 m/s:
    # zero below cut-in, ramp-up region, rated at 15 MW, zero past cut-out.
    expected_curve = (
        [0] * 4
        + [0.499, 1.424, 2.732, 4.469, 6.643, 9.459, 12.975]
        + [15] * 15
        + [0] * 5
    )
    for speed, expected_power in zip(range(31), expected_curve):
        power = opt.ref_power_curve(v=speed)
        assert isinstance(power, (float, int))
        assert round(power, 3) == expected_power
        # the curve should equal the power coefficient times the available
        # wind resource at the same speed
        resource_power = opt.power_coefficient(v=speed) * opt.power_wind_resource(
            v=speed
        )
        assert round(resource_power, 10) == round(power, 10)
29 30
def test_weibull_probability_distribution():
    """Test ``h2ss.optimisation.weibull_probability_distribution``"""
    # sample shape (k), scale (c), and wind speed (v) values; zip truncates
    # to the shortest list, so only the first five speeds are used
    shape_vals = [1.4, 1.7, 2.0, 2.15, 2.25]
    scale_vals = [5.1, 9.2, 10.4, 8, 12]
    speed_vals = [0, 7.2, 11.7, 14.6, 28, 20]
    expected = []
    actual = []
    for k, c, v in zip(shape_vals, scale_vals, speed_vals):
        # Weibull PDF: (k/c) * (v/c)^(k-1) * e^(-(v/c)^k), rounded to
        # 10 decimal places to sidestep floating-point noise
        expected.append(
            round(k / c * ((v / c) ** (k - 1)) * np.e ** (-((v / c) ** k)), 10)
        )
        actual.append(
            round(opt.weibull_probability_distribution(k=k, c=c, v=v), 10)
        )
    assert actual == expected
48 49
def test_number_of_turbines():
    """Test ``h2ss.optimisation.number_of_turbines``"""
    # offshore wind farm capacities [MW] and per-turbine rated powers [MW]
    farm_capacities = [500, 600, 700, 800, 900]
    turbine_ratings = [10, 11, 12, 13, 14]
    # the turbine count should be the truncated (floor) quotient
    expected = [
        int(cap / rating)
        for cap, rating in zip(farm_capacities, turbine_ratings)
    ]
    actual = [
        opt.number_of_turbines(
            owf_cap=np.array(cap), wt_power=np.array(rating)
        )
        for cap, rating in zip(farm_capacities, turbine_ratings)
    ]
    assert actual == expected
64 65
def integrate_lambda(k, c):
    """Build the integrand for the quad integration test below.

    Returns a single-argument callable of wind speed ``v`` that multiplies
    the reference power curve by the Weibull PDF with the given shape ``k``
    and scale ``c``.
    """

    def _integrand(v):
        return opt.ref_power_curve(
            v=v
        ) * opt.weibull_probability_distribution(k=k, c=c, v=v)

    return _integrand
72 73
def test_annual_energy_production_function():
    """Test ``h2ss.optimisation.annual_energy_production_function``"""
    turbine_counts = [50, 65, 80, 95]
    shape_vals = [1.4, 1.7, 1.9, 2.0, 2.15]
    scale_vals = [5.1, 9.2, 11, 10.4, 8]
    cut_in = 3
    cut_out = 25
    wake_loss = 0.1
    acdc_efficiency = 0.982
    expected = []
    actual = []
    for n, k, c in zip(turbine_counts, shape_vals, scale_vals):
        actual.append(
            opt.annual_energy_production_function(n_turbines=n, k=k, c=c)[0]
        )
        # numerically integrate power curve x Weibull PDF between the
        # cut-in and cut-out speeds
        integral, _ = integrate.quad(
            integrate_lambda(k=k, c=c),
            a=cut_in,
            b=cut_out,
            limit=100,  # an upper bound on the number of subintervals used
            epsabs=1.49e-6,  # absolute error tolerance
        )
        # scale to a year of hours and apply wake and AC/DC losses
        expected.append(
            365 * 24 * n * (1 - wake_loss) * acdc_efficiency * integral
        )
    assert actual == expected
98 99
def test_annual_hydrogen_production():
    """Test ``h2ss.optimisation.annual_hydrogen_production``"""
    aep = 15e6  # annual energy production [MWh]
    eta_conv = 0.6  # conversion efficiency
    e_pcl = 0.004  # energy for compression/liquefaction
    # hydrogen yield: AEP divided by the per-kg energy requirement
    # (119.96 MJ/kg LHV converted via 3600 s/h, plus compression energy)
    expected = aep / (119.96 / (3600 * eta_conv) + e_pcl)
    actual = opt.annual_hydrogen_production(
        aep=aep, eta_conv=eta_conv, e_pcl=e_pcl
    )
    assert actual == expected
108 109
def test_transmission_distance():
    """Test ``h2ss.optimisation.transmission_distance``

    Builds two wind farm polygons, five cavern points, and an injection
    point (given as degrees/minutes), computes the expected pipeline
    distances by hand, and checks that ``opt.transmission_distance``
    reproduces both the distance columns and the injection point.
    """
    # two wind farms in EPSG:4326, reprojected to the project CRS;
    # NOTE(review): the "Codling" ring has all x == 10.2, i.e. a degenerate
    # (zero-area) polygon — presumably intentional for this test
    wf_data = gpd.GeoDataFrame(
        {
            "name": ["Dublin Array", "Codling"],
            "geometry": [
                Polygon(
                    [
                        (10.0, 64.0),
                        (10.0, 63.9),
                        (10.1, 63.9),
                        (10.1, 64.0),
                        (10.0, 64.0),
                    ]
                ),
                Polygon(
                    [
                        (10.2, 64.2),
                        (10.2, 63.8),
                        (10.2, 63.8),
                        (10.2, 64.2),
                        (10.2, 64.2),
                    ]
                ),
            ],
        },
        crs=4326,
    ).to_crs(rd.CRS)
    # five candidate cavern locations, also reprojected to the project CRS
    cavern_df = gpd.GeoDataFrame(
        {
            "geometry": [
                Point([11.0, 65.0]),
                Point([11.1, 65.1]),
                Point([11.2, 65.2]),
                Point([11.3, 65.3]),
                Point([11.4, 65.4]),
            ]
        },
        crs=4326,
    ).to_crs(rd.CRS)
    # injection point as (lon degrees, lon minutes, lat degrees, lat minutes)
    injection_point_coords = (12, 10, 66, 30)
    lond, lonm, latd, latm = injection_point_coords
    # build the point twice and drop_duplicates so the resulting GeoSeries
    # matches the shape/index produced by the function under test
    injection_point = (
        gpd.GeoSeries(
            [
                Point(lond + lonm / 60, latd + latm / 60),
                Point(lond + lonm / 60, latd + latm / 60),
            ],
            crs=4326,
        )
        .to_crs(rd.CRS)
        .drop_duplicates()
    )
    # expected cavern-to-injection-point distances [km]
    distance_ip = []
    for j in list(cavern_df["geometry"]):
        distance_ip.append(injection_point.distance(j, align=False) / 1000)
    cavern_df["distance_ip"] = distance_ip
    # expected per-farm distances: farm-to-cavern [km] plus the
    # cavern-to-injection-point leg, one column per farm
    distance_wf = {}
    for i, g in zip(list(wf_data["name"]), list(wf_data["geometry"])):
        distance_wf[i] = []
        for j, k in zip(
            list(cavern_df["geometry"]), list(cavern_df["distance_ip"])
        ):
            distance_wf[i].append(g.distance(j) / 1000 + k)
        cavern_df[f"dist_{i.replace(' ', '_')}"] = distance_wf[i]
    cavern_df_func, injection_point_func = opt.transmission_distance(
        cavern_df=cavern_df,
        wf_data=wf_data,
        injection_point_coords=injection_point_coords,
    )
    assert_geodataframe_equal(cavern_df_func, cavern_df)
    assert_geoseries_equal(injection_point_func, injection_point)
182 183
def test_electrolyser_capacity():
    """Test ``h2ss.optimisation.electrolyser_capacity``"""
    turbine_counts = [25, 50, 75, 100, 125]
    cap_ratios = [0.9, 0.88, 0.86, 0.84, 0.82]
    # expected capacity: farm capacity (turbines x reference rated power)
    # scaled by the capacity ratio and truncated to an integer
    expected = [
        int(n * opt.REF_RATED_POWER * ratio)
        for n, ratio in zip(turbine_counts, cap_ratios)
    ]
    actual = [
        opt.electrolyser_capacity(
            n_turbines=np.array(n), cap_ratio=np.array(ratio)
        )
        for n, ratio in zip(turbine_counts, cap_ratios)
    ]
    assert actual == expected
199 200
def test_capex_pipeline():
    """Test ``h2ss.optimisation.capex_pipeline``"""
    e_cap = 500  # electrolyser capacity
    p_rate = 0.006  # production rate
    # shared subexpression of the pipeline cost formula
    flow_term = e_cap * p_rate / (8.060934075639166 * 15 * np.pi)
    expected = 2 * (
        16000000 * flow_term + 1197200 * np.sqrt(flow_term) + 329000
    )
    actual = opt.capex_pipeline(e_cap=e_cap, p_rate=p_rate)
    # compare rounded to 8 decimal places to allow for float noise
    assert round(actual, 8) == round(expected, 8)
213 214
def test_lcot_pipeline_function():
    """Test ``h2ss.optimisation.lcot_pipeline_function``"""
    capex = 1000
    transmission_distance = 100
    ahp = 500  # annual hydrogen production
    opex_ratio = 0.03
    discount_rate = 0.05
    lifetime = 40
    opex = capex * opex_ratio
    years = range(lifetime + 1)
    # discounted OPEX and hydrogen production over the project lifetime
    opex_npv = sum(
        opex / np.power((1 + discount_rate), year) for year in years
    )
    ahp_npv = sum(
        ahp / np.power((1 + discount_rate), year) for year in years
    )
    # levelised cost of transmission: discounted costs per discounted
    # hydrogen output, scaled by pipeline length
    lcot = (capex + opex_npv) * transmission_distance / ahp_npv
    lcot_func = opt.lcot_pipeline_function(
        capex=capex,
        d_transmission=transmission_distance,
        ahp=ahp,
        opex_ratio=opex_ratio,
        discount_rate=discount_rate,
        lifetime=lifetime,
    )
    assert lcot_func == lcot
247 248
def test_lcot_pipeline():
    """Test ``h2ss.optimisation.lcot_pipeline``"""
    weibull_wf_data = pd.DataFrame(
        {
            "name": ["Dublin Array", "Codling", "NISA"],
            "CAPEX": [1000, 2000, 3000],
            "AHP": [8000, 9000, 10000],
        }
    )
    cavern_df = pd.DataFrame(
        {
            "dist_Dublin_Array": [x + 2 for x in range(20)],
            "dist_Codling": [x + 3 for x in range(20)],
            "dist_NISA": [x + 4 for x in range(20)],
        }
    )

    # compute the expected LCOT column per farm from its CAPEX, AHP, and
    # the matching distance column
    for wf, capex, ahp in zip(
        weibull_wf_data["name"],
        weibull_wf_data["CAPEX"],
        weibull_wf_data["AHP"],
    ):
        label = wf.replace(" ", "_")
        cavern_df[f"LCOT_{label}"] = opt.lcot_pipeline_function(
            capex=capex,
            d_transmission=cavern_df[f"dist_{label}"],
            ahp=ahp,
        )

    cavern_df_func = opt.lcot_pipeline(
        weibull_wf_data=weibull_wf_data, cavern_df=cavern_df
    )
    assert_frame_equal(cavern_df_func, cavern_df)