Source code for tests.test_optimisation

  1"""test ``h2ss.optimisation`` functions.
  2
  3"""
  4
  5import geopandas as gpd
  6import numpy as np
  7import pandas as pd
  8from geopandas.testing import assert_geodataframe_equal, assert_geoseries_equal
  9from pandas.testing import assert_frame_equal
 10from scipy import integrate
 11from shapely.geometry import Point, Polygon
 12
 13from h2ss import data as rd
 14from h2ss import optimisation as opt
 15
 16
def test_ref_power_curve():
    """Test ``h2ss.optimisation.ref_power_curve``

    Checks the reference turbine power output at every integer wind speed
    from 0 to 30 m/s against tabulated expected values, and verifies the
    curve equals power coefficient × available wind resource power.
    """
    # Expected output for wind speeds 0..30 m/s: zero below cut-in,
    # a ramp-up region, a rated-power plateau, zero above cut-out.
    expected = [0] * 4
    expected += [0.499, 1.424, 2.732, 4.469, 6.643, 9.459, 12.975]
    expected += [15] * 15
    expected += [0] * 5
    for speed, rated in enumerate(expected):
        power = opt.ref_power_curve(v=speed)
        assert isinstance(power, (float, int))
        assert round(power, 3) == rated
        # the curve must agree with the product of its two components
        combined = opt.power_coefficient(v=speed) * opt.power_wind_resource(
            v=speed
        )
        assert round(combined, 10) == round(power, 10)
def test_weibull_probability_distribution():
    """Test ``h2ss.optimisation.weibull_probability_distribution``

    Compares the function's output against a manual implementation of the
    Weibull probability density function for several (k, c, v) triples.
    """
    # NOTE(review): the original data had a sixth wind speed (20) that was
    # silently discarded because ``zip`` truncates to the shortest input
    # (only five (k, c) pairs exist); the dead value has been removed so
    # the three lists have matching lengths.
    k_vals = [1.4, 1.7, 2.0, 2.15, 2.25]  # shape parameters
    c_vals = [5.1, 9.2, 10.4, 8, 12]  # scale parameters
    v_vals = [0, 7.2, 11.7, 14.6, 28]  # wind speeds
    weibull = []
    weibull_func = []
    for k, c, v in zip(k_vals, c_vals, v_vals):
        # Weibull PDF: (k / c) · (v / c)^(k − 1) · e^(−(v / c)^k)
        weibull.append(
            k / c * ((v / c) ** (k - 1)) * np.e ** (-((v / c) ** k))
        )
        weibull_func.append(
            opt.weibull_probability_distribution(k=k, c=c, v=v)
        )
    # round both sides to avoid spurious floating-point mismatches
    weibull = [round(x, 10) for x in weibull]
    weibull_func = [round(x, 10) for x in weibull_func]
    assert weibull_func == weibull
def test_number_of_turbines():
    """Test ``h2ss.optimisation.number_of_turbines``

    The number of turbines should be the offshore wind farm capacity
    floor-divided by the rated power of a single turbine.
    """
    farm_capacities = [500, 600, 700, 800, 900]
    turbine_ratings = [10, 11, 12, 13, 14]
    pairs = list(zip(farm_capacities, turbine_ratings))
    expected = [int(cap / rating) for cap, rating in pairs]
    actual = [
        opt.number_of_turbines(
            owf_cap=np.array(cap), wt_power=np.array(rating)
        )
        for cap, rating in pairs
    ]
    assert actual == expected
def integrate_lambda(k, c):
    """Build the integrand used by the quad integration test below.

    Returns a single-argument callable of wind speed ``v`` giving the
    reference power curve weighted by the Weibull PDF for the given
    shape ``k`` and scale ``c`` parameters.
    """

    def _integrand(v):
        return opt.ref_power_curve(
            v=v
        ) * opt.weibull_probability_distribution(k=k, c=c, v=v)

    return _integrand
def test_annual_energy_production_function():
    """Test ``h2ss.optimisation.annual_energy_production_function``

    Reproduces the AEP calculation by numerically integrating the power
    curve weighted by the Weibull distribution between cut-in and cut-out
    wind speeds, then applying wake and AC-DC conversion losses.
    """
    turbine_counts = [50, 65, 80, 95]
    k_vals = [1.4, 1.7, 1.9, 2.0, 2.15]
    c_vals = [5.1, 9.2, 11, 10.4, 8]
    cut_in = 3
    cut_out = 25
    w_loss = 0.1  # wake loss
    acdc_loss = 0.982  # AC-DC conversion loss
    expected = []
    actual = []
    for n, k, c in zip(turbine_counts, k_vals, c_vals):
        actual.append(
            opt.annual_energy_production_function(n_turbines=n, k=k, c=c)[0]
        )
        integration = integrate.quad(
            integrate_lambda(k=k, c=c),
            a=cut_in,
            b=cut_out,
            limit=100,  # an upper bound on the number of subintervals used
            epsabs=1.49e-6,  # absolute error tolerance
        )
        # hours per year × turbines × losses × expected power
        expected.append(
            365 * 24 * n * (1 - w_loss) * acdc_loss * integration[0]
        )
    assert actual == expected
def test_annual_hydrogen_production():
    """Test ``h2ss.optimisation.annual_hydrogen_production``"""
    aep = 15e6  # annual energy production
    eta_conv = 0.6  # conversion efficiency
    e_pcl = 0.004  # electricity consumed per unit of hydrogen — TODO confirm
    # 119.96 is presumably the heating value of hydrogen in MJ/kg,
    # converted to MWh via the 3600 factor — verify against h2ss docs.
    expected = aep / (119.96 / (3600 * eta_conv) + e_pcl)
    result = opt.annual_hydrogen_production(
        aep=aep, eta_conv=eta_conv, e_pcl=e_pcl
    )
    assert result == expected
def test_transmission_distance():
    """Test ``h2ss.optimisation.transmission_distance``

    Builds small wind-farm and cavern fixtures in EPSG:4326, reprojects
    them to the project CRS, manually computes cavern-to-injection-point
    and cavern-to-wind-farm distances (in km), and checks the function
    reproduces both the augmented cavern GeoDataFrame and the injection
    point GeoSeries.
    """
    # two wind farms as polygons in lon/lat, reprojected to the project CRS
    wf_data = gpd.GeoDataFrame(
        {
            "name": ["Dublin Array", "Codling"],
            "geometry": [
                Polygon(
                    [
                        (10.0, 64.0),
                        (10.0, 63.9),
                        (10.1, 63.9),
                        (10.1, 64.0),
                        (10.0, 64.0),
                    ]
                ),
                Polygon(
                    [
                        (10.2, 64.2),
                        (10.2, 63.8),
                        (10.2, 63.8),
                        (10.2, 64.2),
                        (10.2, 64.2),
                    ]
                ),
            ],
        },
        crs=4326,
    ).to_crs(rd.CRS)
    # five candidate cavern locations as points
    cavern_df = gpd.GeoDataFrame(
        {
            "geometry": [
                Point([11.0, 65.0]),
                Point([11.1, 65.1]),
                Point([11.2, 65.2]),
                Point([11.3, 65.3]),
                Point([11.4, 65.4]),
            ]
        },
        crs=4326,
    ).to_crs(rd.CRS)
    # injection point given as (lon degrees, lon minutes, lat degrees,
    # lat minutes); degrees-minutes are converted to decimal degrees below
    injection_point_coords = (12, 10, 66, 30)
    lond, lonm, latd, latm = injection_point_coords
    # the point is duplicated then deduplicated — presumably to mirror how
    # the function under test constructs its GeoSeries; verify against opt
    injection_point = (
        gpd.GeoSeries(
            [
                Point(lond + lonm / 60, latd + latm / 60),
                Point(lond + lonm / 60, latd + latm / 60),
            ],
            crs=4326,
        )
        .to_crs(rd.CRS)
        .drop_duplicates()
    )
    # distance from each cavern to the injection point, metres → km
    distance_ip = []
    for j in list(cavern_df["geometry"]):
        distance_ip.append(injection_point.distance(j, align=False) / 1000)
    cavern_df["distance_ip"] = distance_ip
    # per-farm distance column: farm-to-cavern distance (km) plus the
    # cavern-to-injection-point distance computed above
    distance_wf = {}
    for i, g in zip(list(wf_data["name"]), list(wf_data["geometry"])):
        distance_wf[i] = []
        for j, k in zip(
            list(cavern_df["geometry"]), list(cavern_df["distance_ip"])
        ):
            distance_wf[i].append(g.distance(j) / 1000 + k)
        # column name derived from the farm name, spaces → underscores
        cavern_df[f"dist_{i.replace(' ', '_')}"] = distance_wf[i]
    cavern_df_func, injection_point_func = opt.transmission_distance(
        cavern_df=cavern_df,
        wf_data=wf_data,
        injection_point_coords=injection_point_coords,
    )
    assert_geodataframe_equal(cavern_df_func, cavern_df)
    assert_geoseries_equal(injection_point_func, injection_point)
def test_electrolyser_capacity():
    """Test ``h2ss.optimisation.electrolyser_capacity``

    Electrolyser capacity should be the wind farm capacity (turbine count
    × reference rated power) scaled by the capacity ratio, truncated to
    an integer.
    """
    turbine_counts = [25, 50, 75, 100, 125]
    cap_ratios = [0.9, 0.88, 0.86, 0.84, 0.82]
    expected = []
    actual = []
    for count, ratio in zip(turbine_counts, cap_ratios):
        actual.append(
            opt.electrolyser_capacity(
                n_turbines=np.array(count), cap_ratio=np.array(ratio)
            )
        )
        expected.append(int(count * opt.REF_RATED_POWER * ratio))
    assert actual == expected
def test_capex_pipeline():
    """Test ``h2ss.optimisation.capex_pipeline``"""
    e_cap = 500  # electrolyser capacity
    p_rate = 0.006  # production rate — TODO confirm units against opt docs
    # common flow-derived term shared by the linear and square-root parts
    # of the cost model; 8.060934075639166 and 15 are presumably model
    # constants (e.g. hydrogen density and flow velocity) — verify in opt
    flow = e_cap * p_rate / (8.060934075639166 * 15 * np.pi)
    expected = 2 * (16000000 * flow + 1197200 * np.sqrt(flow) + 329000)
    actual = opt.capex_pipeline(e_cap=e_cap, p_rate=p_rate)
    assert round(actual, 8) == round(expected, 8)
def test_lcot_pipeline_function():
    """Test ``h2ss.optimisation.lcot_pipeline_function``

    Reproduces the levelised cost of transmission: discounted CAPEX and
    OPEX per km of pipeline divided by discounted hydrogen production
    over the project lifetime.
    """
    capex = 1000
    transmission_distance = 100
    ahp = 500  # annual hydrogen production
    opex_ratio = 0.03
    discount_rate = 0.05
    lifetime = 40
    opex = capex * opex_ratio
    # discounted sums over years 0..lifetime inclusive
    discounted_opex = sum(
        opex / np.power((1 + discount_rate), year)
        for year in range(lifetime + 1)
    )
    discounted_ahp = sum(
        ahp / np.power((1 + discount_rate), year)
        for year in range(lifetime + 1)
    )
    expected = (
        (capex + discounted_opex) * transmission_distance / discounted_ahp
    )
    actual = opt.lcot_pipeline_function(
        capex=capex,
        d_transmission=transmission_distance,
        ahp=ahp,
        opex_ratio=opex_ratio,
        discount_rate=discount_rate,
        lifetime=lifetime,
    )
    assert actual == expected
def test_lcot_pipeline():
    """Test ``h2ss.optimisation.lcot_pipeline``

    Builds per-farm expected LCOT columns via ``lcot_pipeline_function``
    and checks ``lcot_pipeline`` produces the same DataFrame.
    """
    weibull_wf_data = pd.DataFrame(
        {
            "name": ["Dublin Array", "Codling", "NISA"],
            "CAPEX": [1000, 2000, 3000],
            "AHP": [8000, 9000, 10000],
        }
    )
    cavern_df = pd.DataFrame(
        {
            "dist_Dublin_Array": [x + 2 for x in range(20)],
            "dist_Codling": [x + 3 for x in range(20)],
            "dist_NISA": [x + 4 for x in range(20)],
        }
    )

    # expected: one LCOT column per wind farm, keyed on the farm name
    # with spaces replaced by underscores
    for _, row in weibull_wf_data.iterrows():
        key = row["name"].replace(" ", "_")
        cavern_df[f"LCOT_{key}"] = opt.lcot_pipeline_function(
            capex=row["CAPEX"],
            d_transmission=cavern_df[f"dist_{key}"],
            ahp=row["AHP"],
        )

    cavern_df_func = opt.lcot_pipeline(
        weibull_wf_data=weibull_wf_data, cavern_df=cavern_df
    )
    assert_frame_equal(cavern_df_func, cavern_df)