Source code for h2ss.compare

"""Functions to compare and validate results.

References
----------
.. [#Deane21] Deane, P. (2021) Our Climate Neutral Future: Zero by 50. Wind
    Energy Ireland. Available at:
    https://windenergyireland.com/images/files/our-climate-neutral-future-0by50-final-report.pdf
    (Accessed: 8 February 2024).
.. [#Pashchenko24] Pashchenko, D. (2024) ‘Green hydrogen as a power plant fuel:
    What is energy efficiency from production to utilization?’, Renewable
    Energy, 223, p. 120033. Available at:
    https://doi.org/10.1016/j.renene.2024.120033.
.. [#RoyalSociety23] The Royal Society (2023) Large-scale electricity storage.
    London: The Royal Society. Available at:
    https://royalsociety.org/electricity-storage (Accessed: 15 September 2023).
.. [#DECC23] Department of the Environment, Climate and Communications (2023)
    National Hydrogen Strategy. Government of Ireland. Available at:
    https://www.gov.ie/en/publication/624ab-national-hydrogen-strategy/
    (Accessed: 25 July 2023).
"""

import os
import sys

import geopandas as gpd
import numpy as np

from h2ss import capacity as cap
from h2ss import data as rd
from h2ss import functions as fns
from h2ss import optimisation as opt


class HiddenPrints:
    """Suppress print statements: https://stackoverflow.com/a/45669280"""

    def __enter__(self):
        self._original_stdout = sys.stdout
        sys.stdout = open(os.devnull, "w")

    def __exit__(self, exc_type, exc_val, exc_tb):
        sys.stdout.close()
        sys.stdout = self._original_stdout
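
# Usage sketch (added for illustration, not part of the original module):
# HiddenPrints is a context manager, so any chatty call can be wrapped in a
# ``with`` block; stdout is restored once the block exits.
#
#     with HiddenPrints():
#         print("suppressed")  # swallowed by os.devnull
#     print("visible")  # stdout has been restored
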
def electricity_demand_ie(data):
    """Compare the capacity to Ireland's electricity demand in 2050.

    Parameters
    ----------
    data : pandas.Series
        Pandas series or dataframe column of capacities [GWh]

    Notes
    -----
    Figures from [#Deane21]_.
    Assume that the conversion of hydrogen to electricity is 50% efficient;
    when fuel with 100% H2 is used, the higher heating value and lower
    heating value efficiencies are 48.7% and 57.5%, respectively
    [#Pashchenko24]_. This does not account for transmission losses.
    Assume that the hydrogen demand is 17% of the electricity demand, based
    on the Royal Society report on energy storage [#RoyalSociety23]_.
    """
    print(
        "Energy capacity as a percentage of Ireland's electricity demand\n"
        "in 2050 (84–122 TWh electricity), assuming a conversion efficiency\n"
        "of 50%: "
        f"{(data.sum() * .5 / 1000 / 122 * 100):.2f}–"
        f"{(data.sum() * .5 / 1000 / 84 * 100):.2f}%"
    )
    print(
        "Energy capacity as a percentage of Ireland's hydrogen demand\n"
        "in 2050, assuming it is 17% of the electricity demand\n"
        "(84–122 TWh electricity): "
        f"{(data.sum() / 1000 / (122 * .17) * 100):.2f}–"
        f"{(data.sum() / 1000 / (84 * .17) * 100):.2f}%"
    )
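
# Worked example (illustrative): ``data`` is assumed to be in GWh, as implied
# by the division by 1,000 to obtain TWh. For caverns totalling 10,000 GWh
# (10 TWh) of hydrogen, the recoverable electricity is 10 × 0.5 = 5 TWh,
# i.e. 5 / 122 × 100 ≈ 4.10% to 5 / 84 × 100 ≈ 5.95% of projected demand.
#
#     import pandas as pd
#     electricity_demand_ie(data=pd.Series([4000.0, 3500.0, 2500.0]))
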
def hydrogen_demand_ie(data):
    """Compare the capacity to Ireland's hydrogen demand in 2050.

    Parameters
    ----------
    data : pandas.Series
        Pandas series or dataframe column of capacities [GWh]

    Notes
    -----
    Data from the National Hydrogen Strategy [#DECC23]_.
    """
    print(
        "Energy capacity as a percentage of Ireland's domestic hydrogen\n"
        "demand in 2050 (4.6–39 TWh hydrogen): "
        f"{(data.sum() / 1000 / 39 * 100):.2f}–"
        f"{(data.sum() / 1000 / 4.6 * 100):.2f}%"
    )
    print(
        "Energy capacity as a percentage of Ireland's domestic and\n"
        "non-domestic hydrogen demand in 2050 (19.8–74.6 TWh hydrogen): "
        f"{(data.sum() / 1000 / 74.6 * 100):.2f}–"
        f"{(data.sum() / 1000 / 19.8 * 100):.2f}%"
    )
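
# Usage sketch (illustrative): the same 10 TWh of hydrogen capacity would
# cover 10 / 39 × 100 ≈ 25.64% to 10 / 4.6 × 100 ≈ 217.39% of the projected
# domestic hydrogen demand.
#
#     hydrogen_demand_ie(data=pd.Series([4000.0, 3500.0, 2500.0]))
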
def distance_from_pipeline(cavern_df, pipeline_data_path):
    """Calculate the distance of the caverns from the nearest pipeline.

    Parameters
    ----------
    cavern_df : geopandas.GeoDataFrame
        Dataframe of potential caverns
    pipeline_data_path : str
        Path to the offshore pipeline Shapefile data
    """
    pipelines = rd.read_shapefile_from_zip(data_path=pipeline_data_path)
    # keep only pipelines within 25 km of the caverns and merge them
    pipelines = (
        pipelines.to_crs(rd.CRS)
        .overlay(gpd.GeoDataFrame(geometry=cavern_df.buffer(25000)))
        .dissolve()
    )
    distances = []
    for i in range(len(cavern_df)):
        distances.append(
            cavern_df.iloc[[i]]
            .distance(pipelines["geometry"], align=False)
            .values[0]
        )
    print(
        "Distance to nearest pipeline from caverns: "
        f"{np.min(distances) / 1000:.2f}–{np.max(distances) / 1000:.2f} km "
        f"(mean: {np.mean(distances) / 1000:.2f} km)"
    )
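
# Usage sketch; both the cavern dataframe and the pipeline dataset path below
# are hypothetical placeholders for locally available data.
#
#     distance_from_pipeline(
#         cavern_df=caverns,
#         pipeline_data_path=os.path.join(
#             "data", "pipelines", "pipelines.zip"
#         ),
#     )
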
def calculate_number_of_caverns(cavern_df, weibull_wf_data):
    """Calculate the number of caverns required by each wind farm.

    Parameters
    ----------
    cavern_df : geopandas.GeoDataFrame
        Dataframe of potential caverns
    weibull_wf_data : pandas.DataFrame
        Dataframe of the Weibull distribution parameters for the wind farms
    """
    # cumulative sums with caverns sorted by working mass in descending
    # (lower bound on the count) and ascending (upper bound) order
    working_mass_cumsum_1 = (
        cavern_df.sort_values("working_mass", ascending=False)
        .reset_index()[["working_mass", "capacity"]]
        .cumsum()
    )
    working_mass_cumsum_2 = (
        cavern_df.sort_values("working_mass")
        .reset_index()[["working_mass", "capacity"]]
        .cumsum()
    )
    caverns_low = []
    caverns_high = []
    cap_max = []
    for x in range(len(weibull_wf_data)):
        print(weibull_wf_data["name"].iloc[x])
        print(f"Working mass [kg]: {(weibull_wf_data['AHP'].iloc[x]):.6E}")
        caverns_low.append(
            working_mass_cumsum_1.loc[
                working_mass_cumsum_1["working_mass"]
                >= weibull_wf_data["AHP"].iloc[x]
            ]
            .head(1)
            .index[0]
            + 1
        )
        caverns_high.append(
            working_mass_cumsum_2.loc[
                working_mass_cumsum_2["working_mass"]
                >= weibull_wf_data["AHP"].iloc[x]
            ]
            .head(1)
            .index[0]
            + 1
        )
        print(
            f"Number of caverns required: {caverns_low[x]}–{caverns_high[x]}"
        )
        cap_max.append(
            max(
                working_mass_cumsum_1.loc[
                    working_mass_cumsum_1["working_mass"]
                    >= weibull_wf_data["AHP"].iloc[x]
                ]
                .head(1)["capacity"]
                .values[0],
                working_mass_cumsum_2.loc[
                    working_mass_cumsum_2["working_mass"]
                    >= weibull_wf_data["AHP"].iloc[x]
                ]
                .head(1)["capacity"]
                .values[0],
            )
        )
        print(f"Capacity (approx.) [GWh]: {(cap_max[x]):,.2f}")
        print("-" * 78)
    # total number of caverns
    print(
        "Total number of caverns required: "
        f"{sum(caverns_low)}–{sum(caverns_high)}"
    )
    print("-" * 78)
    # number of caverns as a percentage of the total available caverns
    print(
        "Number of caverns required as a percentage of all available caverns:"
        f"\n{(sum(caverns_low) / len(cavern_df) * 100):.2f}–"
        f"{(sum(caverns_high) / len(cavern_df) * 100):.2f}%"
    )
    print("-" * 78)
    # total capacity
    print(f"Total maximum cavern capacity (approx.): {sum(cap_max):,.2f} GWh")
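
# The low/high counts above come from cumulative sums over caverns sorted by
# working mass in descending (fewest caverns) and ascending (most caverns)
# order: the first row whose cumulative working mass meets the wind farm's
# annual hydrogen production (AHP) gives the count. A toy sketch of the same
# lookup with made-up numbers:
#
#     import pandas as pd
#     masses = pd.Series([5.0, 3.0, 2.0])  # working mass per cavern [kg]
#     target = 6.0  # AHP to be stored [kg]
#     desc = masses.sort_values(ascending=False).reset_index(drop=True).cumsum()
#     asc = masses.sort_values().reset_index(drop=True).cumsum()
#     n_low = desc.ge(target).idxmax() + 1  # 2 caverns (5 + 3 >= 6)
#     n_high = asc.ge(target).idxmax() + 1  # 3 caverns (2 + 3 + 5 >= 6)
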
def load_all_data(keep_orig=False):
    """Load all input datasets.

    Parameters
    ----------
    keep_orig : bool
        Whether to keep the original constraints datasets after buffering

    Returns
    -------
    tuple[xarray.Dataset, geopandas.GeoDataFrame, dict[str, geopandas.GeoDataFrame]]
        The halite data, extent, and exclusions
    """
    ds, extent = rd.kish_basin_data_depth_adjusted(
        dat_path=os.path.join("data", "kish-basin"),
        bathymetry_path=os.path.join("data", "bathymetry"),
    )

    exclusions = {}

    # exploration wells
    exclusions["wells"], exclusions["wells_b"] = (
        fns.constraint_exploration_well(
            data_path=os.path.join(
                "data",
                "exploration-wells",
                "Exploration_Wells_Irish_Offshore.shapezip.zip",
            )
        )
    )

    # wind farms
    exclusions["wind_farms"] = fns.constraint_wind_farm(
        data_path=os.path.join(
            "data", "wind-farms", "marine-area-consent-wind.zip"
        )
    )

    # frequent shipping routes
    exclusions["shipping"], exclusions["shipping_b"] = (
        fns.constraint_shipping_routes(
            data_path=os.path.join(
                "data", "shipping", "shipping_frequently_used_routes.zip"
            ),
            dat_extent=extent,
        )
    )

    # shipwrecks
    exclusions["shipwrecks"], exclusions["shipwrecks_b"] = (
        fns.constraint_shipwrecks(
            data_path=os.path.join(
                "data",
                "shipwrecks",
                "IE_GSI_MI_Shipwrecks_IE_Waters_WGS84_LAT.zip",
            ),
            dat_extent=extent,
        )
    )

    # subsea cables
    exclusions["cables"], exclusions["cables_b"] = (
        fns.constraint_subsea_cables(
            data_path=os.path.join("data", "subsea-cables", "KIS-ORCA.gpkg"),
            dat_extent=extent,
        )
    )

    if not keep_orig:
        del exclusions["cables"]
        del exclusions["shipwrecks"]
        del exclusions["shipping"]
        del exclusions["wells"]

    return ds, extent, exclusions
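
# Usage sketch; assumes the input datasets have already been downloaded into
# the ``data`` directory.
#
#     ds, extent, exclusions = load_all_data(keep_orig=False)
#     list(exclusions)
#     # ['wells_b', 'wind_farms', 'shipping_b', 'shipwrecks_b', 'cables_b']
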
def capacity_function(ds, extent, exclusions, cavern_diameter, cavern_height):
    """Calculate the energy storage capacity for different cases.

    Parameters
    ----------
    ds : xarray.Dataset
        Xarray dataset of the halite data
    extent : geopandas.GeoSeries
        Extent of the data
    exclusions : dict[str, geopandas.GeoDataFrame]
        Dictionary of exclusions data
    cavern_diameter : float
        Diameter of the cavern [m]
    cavern_height : float
        Height of the cavern [m]

    Returns
    -------
    tuple[geopandas.GeoDataFrame, geopandas.GeoDataFrame]
        Dataframes of the caverns and zones of interest

    Notes
    -----
    Uses the default constraints, apart from the varying cavern diameter and
    height.
    """
    # distance from the salt formation edge
    edge_buffer = fns.constraint_halite_edge(
        dat_xr=ds, buffer=cavern_diameter * 3
    )

    zones, zds = fns.zones_of_interest(
        dat_xr=ds,
        constraints={
            "net_height": cavern_height,
            "min_depth": 500,
            "max_depth": 2000,
        },
    )

    caverns = fns.generate_caverns_hexagonal_grid(
        zones_df=zones,
        dat_extent=extent,
        diameter=cavern_diameter,
        separation=cavern_diameter * 4,
    )

    caverns = fns.cavern_dataframe(
        dat_zone=zds,
        cavern_df=caverns,
        depths={"min": 500, "min_opt": 1000, "max_opt": 1500, "max": 2000},
    )

    # label caverns by depth and height
    caverns = fns.label_caverns(
        cavern_df=caverns,
        heights=[cavern_height],
        depths={"min": 500, "min_opt": 1000, "max_opt": 1500, "max": 2000},
    )

    with HiddenPrints():
        caverns, _ = fns.generate_caverns_with_constraints(
            cavern_df=caverns,
            exclusions={
                "wells": exclusions["wells_b"],
                "wind_farms": exclusions["wind_farms"],
                "shipwrecks": exclusions["shipwrecks_b"],
                "shipping": exclusions["shipping_b"],
                "cables": exclusions["cables_b"],
                "edge": edge_buffer,
            },
        )

    caverns["cavern_total_volume"] = cap.cavern_volume(
        height=caverns["cavern_height"], diameter=cavern_diameter
    )
    caverns["cavern_volume"] = cap.corrected_cavern_volume(
        v_cavern=caverns["cavern_total_volume"]
    )

    caverns["t_mid_point"] = cap.temperature_cavern_mid_point(
        height=caverns["cavern_height"], depth_top=caverns["cavern_depth"]
    )

    (
        caverns["p_operating_min"],
        caverns["p_operating_max"],
    ) = cap.pressure_operating(
        thickness_overburden=caverns["TopDepthSeabed"],
        depth_water=-caverns["Bathymetry"],
    )

    caverns["rho_min"], caverns["rho_max"] = cap.density_hydrogen_gas(
        p_operating_min=caverns["p_operating_min"],
        p_operating_max=caverns["p_operating_max"],
        t_mid_point=caverns["t_mid_point"],
    )

    (
        caverns["working_mass"],
        caverns["mass_operating_min"],
        caverns["mass_operating_max"],
    ) = cap.mass_hydrogen_working(
        rho_h2_min=caverns["rho_min"],
        rho_h2_max=caverns["rho_max"],
        v_cavern=caverns["cavern_volume"],
    )

    caverns["capacity"] = cap.energy_storage_capacity(
        m_working=caverns["working_mass"]
    )

    caverns["cavern_diameter"] = cavern_diameter

    return caverns, zones
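
# Usage sketch; the cavern dimensions below are illustrative values, not
# prescribed defaults.
#
#     ds, extent, exclusions = load_all_data()
#     caverns, zones = capacity_function(
#         ds=ds,
#         extent=extent,
#         exclusions=exclusions,
#         cavern_diameter=85,
#         cavern_height=120,
#     )
#     caverns["capacity"].sum()  # total energy storage capacity [GWh]
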
def optimisation_function(
    ds, extent, exclusions, cavern_diameter, cavern_height
):
    """Run all capacity and optimisation functions.

    Parameters
    ----------
    ds : xarray.Dataset
        Xarray dataset of the halite data
    extent : geopandas.GeoSeries
        Extent of the data
    exclusions : dict[str, geopandas.GeoDataFrame]
        Dictionary of exclusions data
    cavern_diameter : float
        Diameter of the cavern [m]
    cavern_height : float
        Height of the cavern [m]

    Returns
    -------
    tuple[geopandas.GeoDataFrame, geopandas.GeoDataFrame, geopandas.GeoDataFrame, geopandas.GeoSeries]
        Dataframes of the caverns, zones, Weibull parameters, and injection
        point

    Notes
    -----
    Uses the default constraints, apart from the varying cavern diameter and
    height.
    """
    caverns, zones = capacity_function(
        ds=ds,
        extent=extent,
        exclusions=exclusions,
        cavern_diameter=cavern_diameter,
        cavern_height=cavern_height,
    )
    # extract data for wind farms at 150 m
    weibull_wf_df = fns.read_weibull_data(
        data_path_weibull=os.path.join(
            "data",
            "weibull-parameters-wind-speeds",
            "Weibull_150m_params_ITM.zip",
        ),
        data_path_wind_farms=os.path.join(
            "data", "wind-farms", "marine-area-consent-wind.zip"
        ),
    )
    # number of 15 MW turbines, rounded down to the nearest integer
    weibull_wf_df["n_turbines"] = opt.number_of_turbines(
        owf_cap=weibull_wf_df["cap"]
    )
    weibull_wf_df = opt.annual_energy_production(weibull_wf_data=weibull_wf_df)
    weibull_wf_df["AHP"] = opt.annual_hydrogen_production(
        aep=weibull_wf_df["AEP"]
    )
    caverns, injection_point = opt.transmission_distance(
        cavern_df=caverns, wf_data=exclusions["wind_farms"]
    )
    weibull_wf_df["E_cap"] = opt.electrolyser_capacity(
        n_turbines=weibull_wf_df["n_turbines"]
    )
    with HiddenPrints():
        weibull_wf_df["CAPEX"] = opt.capex_pipeline(
            e_cap=weibull_wf_df["E_cap"]
        )
    caverns = opt.lcot_pipeline(
        weibull_wf_data=weibull_wf_df, cavern_df=caverns
    )
    return caverns, zones, weibull_wf_df, injection_point
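
# End-to-end usage sketch (illustrative cavern dimensions):
#
#     ds, extent, exclusions = load_all_data()
#     caverns, zones, weibull_wf_df, injection_point = optimisation_function(
#         ds=ds,
#         extent=extent,
#         exclusions=exclusions,
#         cavern_diameter=85,
#         cavern_height=120,
#     )
#     calculate_number_of_caverns(
#         cavern_df=caverns, weibull_wf_data=weibull_wf_df
#     )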